diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index 9bc33cfa80a..3f60b1cca2f 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -7,6 +7,7 @@ use crate::{
 
 use crate::{quote, EagerMacroId, LazyMacroId, MacroCallId};
 use either::Either;
+use mbe::parse_to_token_tree;
 use ra_db::{FileId, RelativePath};
 use ra_parser::FragmentKind;
 
@@ -306,10 +307,9 @@ fn include_expand(
 
     // FIXME:
     // Handle include as expression
-    let node =
-        db.parse_or_expand(file_id.into()).ok_or_else(|| mbe::ExpandError::ConversionError)?;
-    let res =
-        mbe::syntax_node_to_token_tree(&node).ok_or_else(|| mbe::ExpandError::ConversionError)?.0;
+    let res = parse_to_token_tree(&db.file_text(file_id.into()))
+        .ok_or_else(|| mbe::ExpandError::ConversionError)?
+        .0;
 
     Ok((res, FragmentKind::Items))
 }
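
For context, a minimal sketch of the new entry point in use. The helper name `file_text_to_tt` is hypothetical; `parse_to_token_tree`'s signature is taken from the `ra_mbe` hunk below, and the `tt` crate is assumed to be in scope (it is a dependency of `ra_hir_expand`):

```rust
use mbe::parse_to_token_tree;

// Hypothetical helper mirroring the new include_expand body: lex the
// included file's text straight into a token tree, with no intermediate
// syntax-tree parse. Returns None when the text fails to tokenize.
fn file_text_to_tt(text: &str) -> Option<tt::Subtree> {
    let (subtree, _token_map) = parse_to_token_tree(text)?;
    Some(subtree)
}
```
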
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 2c6ae565896..43afe24ccb0 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -31,7 +31,8 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap,
+    ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
+    TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fb9fa531494..fcb73fbc7c5 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,8 +2,10 @@
 
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
-    SyntaxTreeBuilder, TextRange, TextUnit, T,
+    ast::{self, make::tokens::doc_comment},
+    tokenize, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind,
+    SyntaxKind::*,
+    SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, Token, T,
 };
 use rustc_hash::FxHashMap;
 use std::iter::successors;
@@ -48,9 +50,11 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenM
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
     let global_offset = node.text_range().start();
-    let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 };
+    let mut c = Convertor {
+        id_alloc: TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 },
+    };
     let subtree = c.go(node)?;
-    Some((subtree, c.map))
+    Some((subtree, c.id_alloc.map))
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -89,6 +93,28 @@ pub fn token_tree_to_syntax_node(
     Ok((parse, range_map))
 }
 
+/// Convert a string to a `tt::Subtree`, also returning the `TokenMap` built during conversion
+pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+    let (tokens, errors) = tokenize(text);
+    if !errors.is_empty() {
+        return None;
+    }
+
+    let mut conv = RawConvertor {
+        text,
+        offset: TextUnit::default(),
+        inner: tokens.iter(),
+        id_alloc: TokenIdAlloc {
+            map: Default::default(),
+            global_offset: TextUnit::default(),
+            next_id: 0,
+        },
+    };
+
+    let subtree = conv.go()?;
+    Some((subtree, conv.id_alloc.map))
+}
+
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
@@ -118,6 +144,14 @@ impl TokenMap {
         self.entries
             .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
     }
+
+    fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
+        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
+            if let TokenTextRange::Delimiter(open, _) = entry.1 {
+                entry.1 = TokenTextRange::Delimiter(open, close_relative_range);
+            }
+        }
+    }
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
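
Why `update_close_delim` is needed: the streaming converter added further down allocates a delimiter's `TokenId` when it sees the opening token, before the matching close token has been reached. A toy model of that two-phase bookkeeping (simplified types; the real code uses `TextRange` and `TokenTextRange`):

```rust
// Toy delimiter map: (token_id, (open_range, close_range)), with ranges
// modelled as (start, end) pairs instead of TextRange.
struct DelimMap {
    entries: Vec<(u32, ((u32, u32), (u32, u32)))>,
}

impl DelimMap {
    // Phase 1: on `(`, `[` or `{`, record the open range twice as a placeholder.
    fn open(&mut self, id: u32, open: (u32, u32)) {
        self.entries.push((id, (open, open)));
    }

    // Phase 2: once the matching close token arrives, patch in its real range.
    fn close(&mut self, id: u32, close: (u32, u32)) {
        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == id) {
            entry.1 .1 = close;
        }
    }
}
```
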
@@ -188,12 +222,161 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
     }
 }
 
-struct Convertor {
+struct TokenIdAlloc {
     map: TokenMap,
     global_offset: TextUnit,
     next_id: u32,
 }
 
+impl TokenIdAlloc {
+    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+        let relative_range = absolute_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert(token_id, relative_range);
+        token_id
+    }
+
+    fn delim(&mut self, open_abs_range: TextRange, close_abs_range: TextRange) -> tt::TokenId {
+        let open_relative_range = open_abs_range - self.global_offset;
+        let close_relative_range = close_abs_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+
+        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
+        token_id
+    }
+
+    fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert_delim(token_id, open_abs_range, open_abs_range);
+        token_id
+    }
+
+    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: TextRange) {
+        self.map.update_close_delim(id, close_abs_range);
+    }
+}
+
+/// A convertor that works on raw tokens, straight from the lexer
+struct RawConvertor<'a> {
+    text: &'a str,
+    offset: TextUnit,
+    id_alloc: TokenIdAlloc,
+    inner: std::slice::Iter<'a, Token>,
+}
+
+impl RawConvertor<'_> {
+    fn go(&mut self) -> Option<tt::Subtree> {
+        let mut subtree = tt::Subtree::default();
+        subtree.delimiter = None;
+        while self.peek().is_some() {
+            self.collect_leaf(&mut subtree.token_trees);
+        }
+        if subtree.token_trees.is_empty() {
+            return None;
+        }
+        if subtree.token_trees.len() == 1 {
+            if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
+                return Some(first.clone());
+            }
+        }
+        Some(subtree)
+    }
+
+    fn bump(&mut self) -> Option<(Token, TextRange)> {
+        let token = self.inner.next()?;
+        let range = TextRange::offset_len(self.offset, token.len);
+        self.offset += token.len;
+        Some((*token, range))
+    }
+
+    fn peek(&self) -> Option<Token> {
+        self.inner.as_slice().get(0).cloned()
+    }
+
+    fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
+        let (token, range) = match self.bump() {
+            None => return,
+            Some(it) => it,
+        };
+
+        let k: SyntaxKind = token.kind;
+        if k == COMMENT {
+            let node = doc_comment(&self.text[range]);
+            if let Some(tokens) = convert_doc_comment(&node) {
+                result.extend(tokens);
+            }
+            return;
+        }
+
+        result.push(if k.is_punct() {
+            let delim = match k {
+                T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
+                T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
+                T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
+                _ => None,
+            };
+
+            if let Some((kind, closed)) = delim {
+                let mut subtree = tt::Subtree::default();
+                let id = self.id_alloc.open_delim(range);
+                subtree.delimiter = Some(tt::Delimiter { kind, id });
+
+                while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
+                    self.collect_leaf(&mut subtree.token_trees);
+                }
+                let last_range = match self.bump() {
+                    None => return,
+                    Some(it) => it.1,
+                };
+                self.id_alloc.close_delim(id, last_range);
+                subtree.into()
+            } else {
+                let spacing = match self.peek() {
+                    Some(next)
+                        if next.kind.is_trivia()
+                            || next.kind == T!['[']
+                            || next.kind == T!['{']
+                            || next.kind == T!['('] =>
+                    {
+                        tt::Spacing::Alone
+                    }
+                    Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
+                    _ => tt::Spacing::Alone,
+                };
+                let char =
+                    self.text[range].chars().next().expect("Token from lexer must be single char");
+
+                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
+            }
+        } else {
+            macro_rules! make_leaf {
+                ($i:ident) => {
+                    tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
+                };
+            }
+            let leaf: tt::Leaf = match k {
+                T![true] | T![false] => make_leaf!(Literal),
+                IDENT | LIFETIME => make_leaf!(Ident),
+                k if k.is_keyword() => make_leaf!(Ident),
+                k if k.is_literal() => make_leaf!(Literal),
+                _ => return,
+            };
+
+            leaf.into()
+        });
+    }
+}
+
+// FIXME: There is some duplicated logic between `RawConvertor` and `Convertor`.
+// It would be nice to refactor by converting `SyntaxNode` to `ra_parser::Token` and
+// then using `RawConvertor` directly. But performance-wise that may not be a good idea.
+struct Convertor {
+    id_alloc: TokenIdAlloc,
+}
+
 impl Convertor {
     fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
         // This tree is empty
@@ -236,7 +419,7 @@ impl Convertor {
         };
         let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
             kind,
-            id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
+            id: self.id_alloc.delim(first_child.text_range(), last_child.text_range()),
         });
 
         let mut token_trees = Vec::new();
@@ -273,7 +456,7 @@ impl Convertor {
                                 tt::Leaf::from(tt::Punct {
                                     char,
                                     spacing,
-                                    id: self.alloc(token.text_range()),
+                                    id: self.id_alloc.alloc(token.text_range()),
                                 })
                                 .into(),
                             );
@@ -282,7 +465,7 @@ impl Convertor {
                         macro_rules! make_leaf {
                             ($i:ident) => {
                                 tt::$i {
-                                    id: self.alloc(token.text_range()),
+                                    id: self.id_alloc.alloc(token.text_range()),
                                     text: token.text().clone(),
                                 }
                                 .into()
@@ -313,28 +496,6 @@ impl Convertor {
         let res = tt::Subtree { delimiter, token_trees };
         Some(res)
     }
-
-    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
-        let relative_range = absolute_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-        self.map.insert(token_id, relative_range);
-        token_id
-    }
-
-    fn alloc_delim(
-        &mut self,
-        open_abs_range: TextRange,
-        close_abs_range: TextRange,
-    ) -> tt::TokenId {
-        let open_relative_range = open_abs_range - self.global_offset;
-        let close_relative_range = close_abs_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-
-        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
-        token_id
-    }
 }
 
 struct TtTreeSink<'a> {
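
The `tt::Spacing` computation in `collect_leaf` above is the subtle part of the raw converter: a punct is `Joint` only when the next raw token is another punct, while trivia or an opening delimiter forces `Alone`. A standalone model of that decision, with a toy `Kind` enum standing in for `SyntaxKind`:

```rust
#[derive(Clone, Copy, PartialEq)]
enum Kind { Punct, OpenDelim, Trivia, Other }

#[derive(Clone, Copy, PartialEq, Debug)]
enum Spacing { Alone, Joint }

// Mirrors the match in RawConvertor::collect_leaf: whitespace/comments and
// opening delimiters break a punct run; an adjacent punct glues, so that
// e.g. `=` followed by `>` can later be re-assembled into `=>`.
fn spacing(next: Option<Kind>) -> Spacing {
    match next {
        Some(Kind::Trivia) | Some(Kind::OpenDelim) => Spacing::Alone,
        Some(Kind::Punct) => Spacing::Joint,
        _ => Spacing::Alone,
    }
}
```
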
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 066ce150b5a..6d5d1e9e601 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1499,12 +1499,20 @@ impl MacroFixture {
     }
 }
 
-pub(crate) fn parse_macro(macro_definition: &str) -> MacroFixture {
-    let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
+pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
+    let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
     let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+
+    let parsed = parse_to_token_tree(
+        &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
+    )
+    .unwrap()
+    .0;
+    assert_eq!(definition_tt, parsed);
+
     let rules = MacroRules::parse(&definition_tt).unwrap();
     MacroFixture { rules }
 }
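
With this cross-check in place, every macro fixture in the suite exercises both conversion paths. A sketch of what that buys (the fixture text is illustrative):

```rust
#[test]
fn fixture_cross_checks_converters() {
    // Builds the definition through ast_to_token_tree *and* parse_to_token_tree;
    // the assert_eq! inside parse_macro fails on any divergence between them.
    let _fixture = parse_macro("macro_rules! m { ($i:ident) => { $i } }");
}
```
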
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 53d6fa56208..ae8829807ce 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -267,6 +267,12 @@ pub mod tokens {
         sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
     }
 
+    pub fn doc_comment(text: &str) -> SyntaxToken {
+        assert!(!text.trim().is_empty());
+        let sf = SourceFile::parse(text).ok().unwrap();
+        sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+    }
+
     pub fn literal(text: &str) -> SyntaxToken {
         assert_eq!(text.trim(), text);
         let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
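
A usage sketch for the new `doc_comment` helper; the `COMMENT` kind check is an assumption about what the first token of a comment-only file is:

```rust
#[test]
fn doc_comment_token_smoke() {
    use ra_syntax::{ast::make::tokens::doc_comment, SyntaxKind};

    // Parse a file consisting of just a doc comment and pull out its first
    // token; RawConvertor uses this helper to feed raw COMMENT text back
    // through convert_doc_comment.
    let token = doc_comment("//! crate-level docs");
    assert_eq!(token.kind(), SyntaxKind::COMMENT);
}
```
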