execution for without expansion
This commit is contained in:
parent
cba548d0ae
commit
4849c899e4
10 changed files with 48 additions and 22 deletions

@@ -1,4 +1,8 @@
 for i in hello bonjour salut comment
 do
-ls
+while cat efezf
+do
+echo INSIDE
+done
+ls | cat
 done

@@ -22,11 +22,12 @@ int exec_for(t_btree **ast)
 	node = (*ast)->item;
 	temp = node->data.cmd.wordlist;
 	var = temp->content;
-	builtin_setenv("setenv", (char*[]){var, 0}, data_singleton()->local_var);
+	temp = temp->next;
+	// declare error bad identifier
 	while (temp)
 	{
-		//process expansion av = token_to_char(temp->content)
-		builtin_setenv("setenv", (char*[]){var, 0}, data_singleton()->local_var);
+		builtin_setenv("setenv", (char*[]){var, temp->content, 0},
+				data_singleton()->local_var);
 		ft_exec(&(*ast)->right);
 		temp = temp->next;
 	}

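The exec_for hunk above reads the wordlist head as the loop-variable name, then assigns each remaining word to that variable through builtin_setenv before running the body with ft_exec (no word expansion yet, as the commit title says). A minimal standalone sketch of that iteration shape, using hypothetical set_var()/run_body() stand-ins rather than the project's real helpers:

#include <stdio.h>

typedef struct s_word
{
	const char		*content;
	struct s_word	*next;
}	t_word;

static void	set_var(const char *name, const char *value)
{
	/* stand-in for builtin_setenv("setenv", {name, value, 0}, local_var) */
	printf("%s=%s\n", name, value);
}

static void	run_body(void)
{
	/* stand-in for ft_exec(&(*ast)->right): run the loop body once */
	printf("body executed\n");
}

int	main(void)
{
	/* a wordlist the parser might build for: for i in hello bonjour salut */
	t_word		w3 = {"salut", NULL};
	t_word		w2 = {"bonjour", &w3};
	t_word		w1 = {"hello", &w2};
	t_word		head = {"i", &w1};
	t_word		*temp;
	const char	*var;

	temp = &head;
	var = temp->content;		/* first node names the loop variable */
	temp = temp->next;
	while (temp)
	{
		set_var(var, temp->content);	/* i=hello, i=bonjour, i=salut */
		run_body();
		temp = temp->next;
	}
	return (0);
}
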
@@ -25,6 +25,7 @@ t_execmap g_execmap[] =
 	{TK_ELIF, &exec_elif},
 	{TK_ELSE, &exec_else},
 	{TK_UNTIL, &exec_until},
+	{TK_FOR, &exec_for},
 	/* {TK_SUBSHELL, &exec_}, */
 	{CMD, &exec_cmd},
 	{0, 0},

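Registering {TK_FOR, &exec_for} in g_execmap presumably lets the executor dispatch a TK_FOR node to exec_for by scanning the table until the node type matches. A small self-contained sketch of that table-dispatch pattern, with simplified stand-in types rather than the project's real ones:

#include <stdio.h>

#define TK_FOR 7	/* arbitrary value, for the sketch only */

typedef int	(*t_handler)(void *node);

typedef struct s_map
{
	int			type;
	t_handler	fn;
}	t_map;

static int	exec_demo(void *node)
{
	(void)node;
	printf("TK_FOR handler called\n");
	return (0);
}

static const t_map	g_map[] = {
	{TK_FOR, &exec_demo},
	{0, 0},
};

static int	dispatch(int type, void *node)
{
	int	i;

	i = 0;
	while (g_map[i].fn)
	{
		if (g_map[i].type == type)
			return (g_map[i].fn(node));
		i++;
	}
	return (-1);	/* no handler registered for this node type */
}

int	main(void)
{
	return (dispatch(TK_FOR, NULL));
}
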
@@ -17,7 +17,6 @@ t_lexstate get_state_global(t_lexer *lexer)
 	char c;
 
 	c = lexer->str[lexer->pos];
-	DG("check, c=%c", lexer->str[lexer->pos]);
 	if (ft_is_delim(c))
 		return (DELIM);
 	else if (c == '#')

@@ -15,11 +15,20 @@
 int isionumber(t_btree **ast, t_list **lst)
 {
 	t_token *token;
+	t_astnode *node;
 
-	(void)ast;
 	token = (*lst)->content;
-	if (token->type == TK_IO_NUMBER)
-		return (1);
+	if (*ast)
+	{
+		node = (*ast)->item;
+		if (node->type == CMD && token->type == TK_IO_NUMBER)
+			return (1);
+	}
+	if (!*ast)
+	{
+		if (token->type == TK_IO_NUMBER)
+			return (1);
+	}
 	return (0);
 }
 

@@ -15,13 +15,25 @@
 int isdir_sep(t_btree **ast, t_list **list)
 {
 	t_token *token;
+	t_astnode *node;
 
-	(void)ast;
 	token = (*list)->content;
-	if (token->type == TK_LESS || token->type == TK_GREAT
-		|| token->type == TK_GREATAND || token->type == TK_LESSAND
-		|| token->type == TK_DLESS || token->type == TK_DGREAT)
+	if (*ast)
+	{
+		node = (*ast)->item;
+		if ((node->type == CMD || node->type == TK_IO_NUMBER) &&
+			(token->type == TK_LESS || token->type == TK_GREAT
+			|| token->type == TK_GREATAND || token->type == TK_LESSAND
+			|| token->type == TK_DLESS || token->type == TK_DGREAT))
+			return (1);
+	}
+	if (!*ast)
+	{
+		if (token->type == TK_LESS || token->type == TK_GREAT
+			|| token->type == TK_GREATAND || token->type == TK_LESSAND
+			|| token->type == TK_DLESS || token->type == TK_DGREAT)
 		return (1);
+	}
 	return (0);
 }
 

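Both isionumber and isdir_sep gain the same guard: when a partial AST already exists, the token is only accepted if the node at the AST root has a compatible type (CMD, or CMD/TK_IO_NUMBER for isdir_sep); with no AST yet, the old token-type check applies unchanged. A compressed sketch of that guard logic, with simplified stand-in structures rather than the project's real t_btree/t_token types:

#include <stdio.h>

enum e_type { CMD = 1, TK_IO_NUMBER, TK_LESS, TK_GREAT };

typedef struct s_node
{
	int	type;
}	t_node;

/* Accept an IO-number token only for a command node, or when no AST exists. */
static int	is_io_number(const t_node *ast_top, int token_type)
{
	if (ast_top && ast_top->type == CMD && token_type == TK_IO_NUMBER)
		return (1);
	if (!ast_top && token_type == TK_IO_NUMBER)
		return (1);
	return (0);
}

int	main(void)
{
	t_node	cmd = {CMD};
	t_node	redir = {TK_LESS};

	printf("%d\n", is_io_number(&cmd, TK_IO_NUMBER));		/* 1 */
	printf("%d\n", is_io_number(&redir, TK_IO_NUMBER));	/* 0 */
	printf("%d\n", is_io_number(NULL, TK_IO_NUMBER));		/* 1 */
	return (0);
}
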
@@ -315,21 +315,21 @@ int aggregate_sym(t_list **stack, t_sym *new_sym, t_parstate *state)
 
 	i = 0;
 	head = (*stack)->content;
-	// DG("aggregate head %s && sym %s",
-	// read_state(*head), read_state(*new_sym));
+	DG("aggregate head %s && sym %s",
+			read_state(*head), read_state(*new_sym));
 	while (g_aggrematch[i].top)
 	{
 		if (*new_sym == g_aggrematch[i].top
 			&& MATCH_STACK(*head, g_aggrematch[i].under))
 
 		{
-			// DG("MATCH : %s", read_state(g_aggrematch[i].new_sym));
+			DG("MATCH : %s", read_state(g_aggrematch[i].new_sym));
 			*new_sym = g_aggrematch[i].new_sym;
 			if (g_aggrematch[i].erase_sym)
 			{
 				pop_stack(stack, g_aggrematch[i].erase_sym);
 				head = (*stack)->content;
-				// DG("stack after pop: %s", read_state(*head));
+				DG("stack after pop: %s", read_state(*head));
 			}
 			if (eval_sym(stack, *new_sym))
 				return ((*state = ERROR));

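This hunk and the ones that follow re-enable DG(...) trace calls that had been commented out. DG looks like the project's debug-trace macro; a minimal sketch under the assumption that it simply forwards a printf-style format to stderr (the real definition may differ):

#include <stdio.h>

/* Hypothetical definition, for illustration only; it requires at least one
** argument after the format string, which every DG call in this diff has. */
#define DG(fmt, ...) fprintf(stderr, "[dbg] " fmt "\n", __VA_ARGS__)

int	main(void)
{
	DG("aggregate head %s && sym %s", "CMD", "TK_FOR");
	return (0);
}
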
@@ -1027,7 +1027,7 @@ int eval_sym(t_list **stack, t_sym new_sym)
 	int i;
 
 	head = (*stack)->content;
-	// DG("eval head %s && sym %s", read_state(*head), read_state(new_sym));
+	DG("eval head %s && sym %s", read_state(*head), read_state(new_sym));
 	i = 0;
 	while (g_stackmatch[i].top)
 	{

@@ -37,7 +37,7 @@ int ft_parse(t_btree **ast, t_list **token, t_parser *parser)
 	while (*token)
 	{
 		produce_sym(&parser->stack, parser->new_sym, token);
-		// DG("new sym %s", read_state(*parser->new_sym));
+		DG("new sym %s", read_state(*parser->new_sym));
 		if (eval_sym(&parser->stack, *parser->new_sym))
 			return ((parser->state = ERROR));
 		else

@@ -107,8 +107,8 @@ int produce_sym(t_list **stack, t_sym *new_sym, t_list **lst)
 
 	token = (*lst)->content;
 	head = (*stack)->content;
-	// DG("produce stack : %s && token : %s", read_state(*head),
-	// read_state(token->type));
+	DG("produce stack : %s && token : %s", read_state(*head),
+			read_state(token->type));
 	i = 0;
 	*new_sym = 0;
 	while (g_prodmatch[i].new_sym)

@@ -116,7 +116,7 @@ int produce_sym(t_list **stack, t_sym *new_sym, t_list **lst)
 		if (token->type == g_prodmatch[i].token
 			&& *head == g_prodmatch[i].stack)
 		{
-			// DG("MATCH : %s", read_state(g_prodmatch[i].new_sym));
+			DG("MATCH : %s", read_state(g_prodmatch[i].new_sym));
 			*new_sym = g_prodmatch[i].new_sym;
 		}
 		i++;