//! Implementation of incremental re-parsing.
//!
//! We use two simple strategies for this:
//! - if the edit modifies only a single token (like changing an identifier's
//!   letter), we replace only this token.
//! - otherwise, we search for the nearest `{}` block which contains the edit
//!   and try to parse only this block.
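//!
//! A rough sketch of how a caller could drive this module (not a doctest;
//! `before` and `range` are placeholders, and the caller-side handling is an
//! assumption for illustration only):
//!
//! ```ignore
//! let parse = SourceFile::parse(before);
//! let edit = AtomTextEdit::replace(range, "new text".to_string());
//! match incremental_reparse(parse.tree().syntax(), &edit, parse.errors.to_vec()) {
//!     // `green` is the new tree root, `errors` the adjusted diagnostics, and
//!     // `old_range` the part of the old tree that was re-parsed.
//!     Some((green, errors, old_range)) => { /* splice `green` into the tree */ }
//!     // The edit crosses token/block boundaries: fall back to a full parse.
//!     None => { /* SourceFile::parse(&edit.apply(before.to_string())) */ }
//! }
//! ```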
use ra_parser::Reparser;
use ra_text_edit::AtomTextEdit;
use crate::{
    algo,
    parsing::{
        lexer::{tokenize, Token},
        text_token_source::TextTokenSource,
        text_tree_sink::TextTreeSink,
    },
    syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode},
    SyntaxError,
    SyntaxKind::*,
    TextRange, TextUnit, T,
};
pub(crate) fn incremental_reparse(
    node: &SyntaxNode,
    edit: &AtomTextEdit,
    errors: Vec<SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
    if let Some((green, old_range)) = reparse_token(node, &edit) {
        return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range));
    }

    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
    }
    None
}
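// Fast path: if the edit is confined to a single token that can be re-lexed in
// isolation (whitespace, comment, identifier, string or raw string), replace
// just that token in the green tree.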
fn reparse_token<'node>(
    root: &'node SyntaxNode,
    edit: &AtomTextEdit,
) -> Option<(GreenNode, TextRange)> {
    let token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
    match token.kind() {
        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
            if token.kind() == WHITESPACE || token.kind() == COMMENT {
                // removing a new line may extend the previous token
                if token.text().to_string()[edit.delete - token.range().start()].contains('\n') {
                    return None;
                }
            }
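            // Re-lex only this token's text with the edit applied; the fast
            // path applies only if the result is still a single token of the
            // same kind.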
            let text = get_text_after_edit(token.clone().into(), &edit);
            let lex_tokens = tokenize(&text);
            let lex_token = match lex_tokens[..] {
                [lex_token] if lex_token.kind == token.kind() => lex_token,
                _ => return None,
            };

            if lex_token.kind == IDENT && is_contextual_kw(&text) {
                return None;
            }
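            // If gluing the following character onto the edited text still
            // lexes as one token, the edit may have changed token boundaries
            // (the token would merge with its neighbour), so bail out.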
            if let Some(next_char) = root.text().char_at(token.range().end()) {
                let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char));
                if tokens_with_next_char.len() == 1 {
                    return None;
                }
            }

            let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into());
            Some((token.replace_with(new_token), token.range()))
        }
        _ => None,
    }
}
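// Slower path: find the smallest enclosing node with a registered `Reparser`
// (typically a `{}` block), re-lex and re-parse only its text, and splice the
// resulting green node back into the tree.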
fn reparse_block<'node>(
    root: &'node SyntaxNode,
    edit: &AtomTextEdit,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
    let (node, reparser) = find_reparsable_node(root, edit.delete)?;
    let text = get_text_after_edit(node.clone().into(), &edit);
    let tokens = tokenize(&text);
    if !is_balanced(&tokens) {
        return None;
    }
    let mut token_source = TextTokenSource::new(&text, &tokens);
    let mut tree_sink = TextTreeSink::new(&text, &tokens);
    reparser.parse(&mut token_source, &mut tree_sink);
    let (green, new_errors) = tree_sink.finish();
    Some((node.replace_with(green), new_errors, node.range()))
}
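// Apply the edit to the text of a single element, rebasing the edit's range
// from file coordinates to element-local coordinates.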
fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
    let edit = AtomTextEdit::replace(edit.delete - element.range().start(), edit.insert.clone());
    let text = match element {
        SyntaxElement::Token(token) => token.text().to_string(),
        SyntaxElement::Node(node) => node.text().to_string(),
    };
    edit.apply(text)
}
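// Identifiers that act as keywords in some positions; replacing a plain
// identifier with one of these could change the parse, so the single-token
// fast path refuses them.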
fn is_contextual_kw(text: &str) -> bool {
    match text {
        "auto" | "default" | "union" => true,
        _ => false,
    }
}
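// Walk up from the element covering the edited range to the first ancestor
// that the parser knows how to re-parse in isolation.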
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
    let node = algo::find_covering_element(node, range);
    let mut ancestors = match node {
        SyntaxElement::Token(it) => it.parent().ancestors(),
        SyntaxElement::Node(it) => it.ancestors(),
    };
    ancestors.find_map(|node| {
        let first_child = node.first_child_or_token().map(|it| it.kind());
        let parent = node.parent().map(|it| it.kind());
        Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
    })
}
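// Cheap sanity check: the re-lexed block must start with `{`, end with `}`,
// and have balanced curly braces in between.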
fn is_balanced(tokens: &[Token]) -> bool {
    if tokens.is_empty()
        || tokens.first().unwrap().kind != T!['{']
        || tokens.last().unwrap().kind != T!['}']
    {
        return false;
    }
    let mut balance = 0usize;
    for t in &tokens[1..tokens.len() - 1] {
        match t.kind {
            T!['{'] => balance += 1,
            T!['}'] => {
                balance = match balance.checked_sub(1) {
                    Some(b) => b,
                    None => return false,
                }
            }
            _ => (),
        }
    }
    balance == 0
}
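// Keep old errors located before the re-parsed range as-is, shift old errors
// located after it by the edit's length delta, and translate errors produced
// by the re-parse into file-relative offsets.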
fn merge_errors(
    old_errors: Vec<SyntaxError>,
    new_errors: Vec<SyntaxError>,
    old_range: TextRange,
    edit: &AtomTextEdit,
) -> Vec<SyntaxError> {
    let mut res = Vec::new();
    for e in old_errors {
        if e.offset() <= old_range.start() {
            res.push(e)
        } else if e.offset() >= old_range.end() {
            res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len()));
        }
    }
    for e in new_errors {
        res.push(e.add_offset(old_range.start(), 0.into()));
    }
    res
}
#[cfg(test)]
mod tests {
    use test_utils::{assert_eq_text, extract_range};

    use super::*;
    use crate::{AstNode, Parse, SourceFile};
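    // Parses `before`, applies the edit both incrementally and from scratch,
    // checks that the incrementally re-parsed range has the expected length,
    // and asserts that the two resulting trees are identical.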
    fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
        let (range, before) = extract_range(before);
        let edit = AtomTextEdit::replace(range, replace_with.to_owned());
        let after = edit.apply(before.clone());

        let fully_reparsed = SourceFile::parse(&after);
        let incrementally_reparsed: Parse<SourceFile> = {
            let f = SourceFile::parse(&before);
            let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
            let (green, new_errors, range) =
                incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
            assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
            Parse::new(green, new_errors)
        };

        assert_eq_text!(
            &fully_reparsed.tree().syntax().debug_dump(),
            &incrementally_reparsed.tree().syntax().debug_dump(),
        );
    }
    #[test] // FIXME: some tests here actually test token reparsing
    fn reparse_block_tests() {
        do_check(
            r"
fn foo() {
    let x = foo + <|>bar<|>
}
",
            "baz",
            3,
        );
        do_check(
            r"
fn foo() {
    let x = foo<|> + bar<|>
}
",
            "baz",
            25,
        );
        do_check(
            r"
struct Foo {
    f: foo<|><|>
}
",
            ",\n g: (),",
            14,
        );
        do_check(
            r"
fn foo {
    let;
    1 + 1;
    <|>92<|>;
}
",
            "62",
            31, // FIXME: reparse only int literal here
        );
        do_check(
            r"
mod foo {
    fn <|><|>
}
",
            "bar",
            11,
        );

        do_check(
            r"
trait Foo {
    type <|>Foo<|>;
}
",
            "Output",
            3,
        );
        do_check(
            r"
impl IntoIterator<Item=i32> for Foo {
    f<|><|>
}
",
            "n next(",
            9,
        );
        do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10);
        do_check(
            r"
pub enum A {
    Foo<|><|>
}
",
            "\nBar;\n",
            11,
        );
        do_check(
            r"
foo!{a, b<|><|> d}
",
            ", c[3]",
            8,
        );
        do_check(
            r"
fn foo() {
    vec![<|><|>]
}
",
            "123",
            14,
        );
        do_check(
            r"
extern {
    fn<|>;<|>
}
",
            " exit(code: c_int)",
            11,
        );
    }
    #[test]
    fn reparse_token_tests() {
        do_check(
            r"<|><|>
fn foo() -> i32 { 1 }
",
            "\n\n\n \n",
            1,
        );
        do_check(
            r"
fn foo() -> <|><|> {}
",
            " \n",
            2,
        );
        do_check(
            r"
fn <|>foo<|>() -> i32 { 1 }
",
            "bar",
            3,
        );
        do_check(
            r"
fn foo<|><|>foo() { }
",
            "bar",
            6,
        );
        do_check(
            r"
fn foo /* <|><|> */ () {}
",
            "some comment",
            6,
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
            2,
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
            2,
        );
        do_check(
            r"
/// foo <|><|>omment
mod { }
",
            "c",
            14,
        );
        do_check(
            r#"
fn -> &str { "Hello<|><|>" }
"#,
            ", world",
            7,
        );
        do_check(
            r#"
fn -> &str { // "Hello<|><|>"
"#,
            ", world",
            10,
        );
        do_check(
            r##"
fn -> &str { r#"Hello<|><|>"#
"##,
            ", world",
            10,
        );
        do_check(
            r"
#[derive(<|>Copy<|>)]
enum Foo {

}
",
            "Clone",
            4,
        );
    }
}