lexer_assignment_word factored out: it doesn't exist anymore...
This commit is contained in:
parent 9bda99e139
commit 8427ab28bf
11 changed files with 21 additions and 62 deletions
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/12/01 12:15:50 by jhalford #+# #+# */
-/*   Updated: 2017/03/17 19:34:31 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:19:18 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -36,7 +36,6 @@ enum e_lexstate
 	BQUOTE,
 	BACKSLASH,
 	CURLY_BRACKETS,
-	ASSIGNEMENT_WORD,
 	END,
 };
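Note that this enum entry and the `&lexer_assignement_word` slot in `g_lexer` (removed at the same position further down) have to go together: the parallel edits suggest the lexer dispatches handlers by indexing the table with the current state, so the enum and the table must stay index-aligned. A minimal sketch of that pattern with illustrative names only (the project's real table is `g_lexer`, and `lexer_lex` appears to be the dispatcher):

```c
#include <stdio.h>

/*
** Toy dispatch: the state enum doubles as the index into the handler
** table, so removing a state means removing its table slot too.
*/
enum	e_toy_state { TOY_WORD, TOY_END };

static int	toy_word(const char *s, int *pos);
static int	toy_end(const char *s, int *pos);

static int	(*g_toy[])(const char *, int *) = {&toy_word, &toy_end};

static int	toy_lex(const char *s, int *pos, enum e_toy_state st)
{
	return (g_toy[st](s, pos));
}

static int	toy_word(const char *s, int *pos)
{
	if (s[*pos] == '\0')
		return (toy_lex(s, pos, TOY_END));
	printf("word char '%c'\n", s[(*pos)++]);
	return (toy_lex(s, pos, TOY_WORD));
}

static int	toy_end(const char *s, int *pos)
{
	(void)s;
	(void)pos;
	printf("end of input\n");
	return (0);
}

int	main(void)
{
	int	pos;

	pos = 0;
	return (toy_lex("ab", &pos, TOY_WORD));
}
```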
@@ -6,7 +6,7 @@
 /*   By: jhalford <jhalford@student.42.fr> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/12/13 13:09:57 by jhalford #+# #+# */
-/*   Updated: 2017/03/16 15:54:15 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 19:51:06 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -6,7 +6,7 @@
 /*   By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2017/01/26 00:07:05 by ariard #+# #+# */
-/*   Updated: 2017/03/17 19:33:51 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:19:59 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2017/02/09 20:39:06 by jhalford #+# #+# */
-/*   Updated: 2017/03/17 00:08:12 by ariard ### ########.fr */
+/*   Updated: 2017/03/17 20:02:48 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -1,37 +0,0 @@
-/* ************************************************************************** */
-/*   */
-/*   ::: :::::::: */
-/*   lexer_assignement_word.c :+: :+: :+: */
-/*   +:+ +:+ +:+ */
-/*   By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */
-/*   +#+#+#+#+#+ +#+ */
-/*   Created: 2017/02/24 20:28:13 by ariard #+# #+# */
-/*   Updated: 2017/03/17 19:33:12 by jhalford ### ########.fr */
-/*   */
-/* ************************************************************************** */
-
-#include "minishell.h"
-
-int	lexer_assignement_word(t_list **alst, t_lexer *lexer)
-{
-	t_token	*token;
-	char	c;
-
-	token = (*alst)->content;
-	/* if (token->type != TK_WORD && token->type != TK_ASSIGNMENT_WORD) */
-	/* { */
-	/* token_append(token, lexer, 0, 0); */
-	/* lexer->pos++; */
-	/* return (lexer_lex(alst, lexer)); */
-	/* } */
-	token->type = TK_ASSIGNMENT_WORD;
-	token_append(token, lexer, 0, 0);
-	lexer->pos++;
-	c = lexer->str[lexer->pos];
-	if ((lexer->state = get_state_global(lexer)))
-		return (lexer_lex(alst, lexer));
-	if ((lexer->state = get_state_redir(lexer)))
-		return (lexer_lex(alst, lexer));
-	lexer->state = ASSIGNEMENT_WORD;
-	return(lexer_lex(alst, lexer));
-}
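With this handler deleted, its work (tag the token `TK_ASSIGNMENT_WORD`, append the `=`, advance) happens directly in `lexer_word`, gated by `word_is_assignment`, as the final hunk of this commit shows. That predicate is not part of this diff; the following is a hypothetical sketch of what it plausibly checks, assuming `args[0]` is the token text collected before the `=`, `args[1]` flags escaped positions, and only an unescaped POSIX shell NAME (`[A-Za-z_][A-Za-z0-9_]*`) qualifies as an assignment prefix:

```c
#include <ctype.h>
#include <stdio.h>

/*
** Hypothetical reconstruction of word_is_assignment(); the real helper
** is not shown in this commit.  Assumption: args[0] is the word so far,
** args[1] (possibly NULL) marks escaped characters, and an escaped or
** non-NAME character disqualifies the word.
*/
int	word_is_assignment(char *args[])
{
	char	*data;
	char	*esc;
	int		i;

	data = args[0];
	esc = args[1];
	if (!data || (!isalpha((unsigned char)data[0]) && data[0] != '_'))
		return (0);
	i = 0;
	while (data[i])
	{
		if (esc && esc[i])
			return (0);
		if (!isalnum((unsigned char)data[i]) && data[i] != '_')
			return (0);
		i++;
	}
	return (1);
}

int	main(void)
{
	printf("%d %d\n",
		word_is_assignment((char *[]){"VAR", NULL}),
		word_is_assignment((char *[]){"2x", NULL}));
	return (0);
}
```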
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/12/03 11:56:49 by jhalford #+# #+# */
-/*   Updated: 2017/03/16 22:36:31 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:17:41 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -17,7 +17,7 @@ int lexer_backslash(t_list **alst, t_lexer *lexer)
 	t_token	*token;

 	token = (*alst)->content;
-	token->type = TK_WORD;
+	token->type = token->type ? token->type : TK_WORD;
 	lexer->pos++;
 	lexer->state = WORD;
 	if (lexer->str[lexer->pos] == 0)
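The same one-line change recurs in `lexer_bquote`, `lexer_dquote`, `lexer_quote`, and `lexer_word` below: instead of unconditionally stamping `TK_WORD`, each handler now keeps a type that was already assigned to the token (such as `TK_ASSIGNMENT_WORD`, set earlier in the same word) and only defaults an unset type. A minimal demo of the idiom, assuming an unset type is the zero value, which the truthiness test implies (the enum constants below mirror the project's names but are redeclared here for self-containment):

```c
#include <assert.h>

enum e_demo_type { TK_NONE = 0, TK_WORD, TK_ASSIGNMENT_WORD };

/* keep an already-classified token type; default only the unset (0) case */
static int	keep_or_default(int type)
{
	return (type ? type : TK_WORD);
}

int	main(void)
{
	assert(keep_or_default(TK_NONE) == TK_WORD);
	assert(keep_or_default(TK_ASSIGNMENT_WORD) == TK_ASSIGNMENT_WORD);
	return (0);
}
```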
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2017/02/09 22:03:48 by jhalford #+# #+# */
-/*   Updated: 2017/03/16 22:30:59 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:07:57 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -18,7 +18,7 @@ int lexer_bquote(t_list **alst, t_lexer *lexer)
 	int		back;

 	token = (*alst)->content;
-	token->type = TK_WORD;
+	token->type = token->type ? token->type : TK_WORD;
 	back = 0;
 	if (lexer->str[lexer->pos] == '`')
 	{
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/11/28 18:36:58 by jhalford #+# #+# */
-/*   Updated: 2017/03/17 19:27:42 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:08:02 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -17,7 +17,7 @@ int lexer_dquote(t_list **alst, t_lexer *lexer)
 	t_token	*token;

 	token = (*alst)->content;
-	token->type = TK_WORD;
+	token->type = token->type ? token->type : TK_WORD;
 	if (lexer->str[lexer->pos] == '"')
 	{
 		if (get_lexer_stack(*lexer) == DQUOTE && (lexer->state = WORD))
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2017/02/09 17:08:51 by jhalford #+# #+# */
-/*   Updated: 2017/03/17 00:09:05 by ariard ### ########.fr */
+/*   Updated: 2017/03/17 20:21:33 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -29,7 +29,6 @@ int (*g_lexer[])(t_list **alst, t_lexer *lexer) =
 	&lexer_bquote,
 	&lexer_backslash,
 	&lexer_curly_braces,
-	&lexer_assignement_word,
 	&lexer_end,
 };
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/12/03 12:07:08 by jhalford #+# #+# */
-/*   Updated: 2017/02/17 15:28:13 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:16:30 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -17,7 +17,7 @@ int lexer_quote(t_list **alst, t_lexer *lexer)
 	t_token	*token;

 	token = (*alst)->content;
-	token->type = TK_WORD;
+	token->type = token->type ? token->type : TK_WORD;
 	if (lexer->str[lexer->pos] == '\'')
 	{
 		lexer->pos++;
@@ -6,7 +6,7 @@
 /*   By: jhalford <jack@crans.org> +#+ +:+ +#+ */
 /*   +#+#+#+#+#+ +#+ */
 /*   Created: 2016/12/03 12:07:11 by jhalford #+# #+# */
-/*   Updated: 2017/03/17 19:33:42 by jhalford ### ########.fr */
+/*   Updated: 2017/03/17 20:17:23 by jhalford ### ########.fr */
 /*   */
 /* ************************************************************************** */
@@ -18,23 +18,21 @@ int lexer_word(t_list **alst, t_lexer *lexer)
 	t_lexstate	state;

 	token = (*alst)->content;
-	token->type = TK_WORD;
+	token->type = token->type ? token->type : TK_WORD;
 	if ((state = get_state_global(lexer))
 		|| (state = get_state_redir(lexer)))
 	{
 		lexer->state = state;
 		return (lexer_lex(alst, lexer));
 	}
-	if (lexer->str[lexer->pos] == '='
+	else if (lexer->str[lexer->pos] == '='
 		&& word_is_assignment((char *[]){token->data, (char *)token->esc}))
 	{
-		lexer->state = ASSIGNEMENT_WORD;
-		return (lexer_lex(alst, lexer));
+		token->type = TK_ASSIGNMENT_WORD;
+		token_append(token, lexer, 0, 0);
+		lexer->pos++;
 	}
 	else
 	{
 		token_append(token, lexer, 0, 0);
 		lexer->pos++;
 	}
 	return (lexer_lex(alst, lexer));
 }
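Net effect of this hunk: the `'='` case no longer detours through a dedicated `ASSIGNEMENT_WORD` state and handler; the token is tagged `TK_ASSIGNMENT_WORD`, the character is consumed, and scanning continues through the single `lexer_lex` re-dispatch at the bottom. A condensed, self-contained model of that branch shape (illustrative names, not the project's API):

```c
#include <stdio.h>

/*
** Toy model of the new lexer_word control flow: classify in place,
** consume one character, then take the single tail re-dispatch.
** T_WORD and T_ASSIGN_WORD stand in for the project's token types.
*/
#define T_WORD			1
#define T_ASSIGN_WORD	2

typedef struct s_mini
{
	const char	*str;
	int			pos;
	int			type;
}	t_mini;

static int	mini_lex(t_mini *l)
{
	if (l->str[l->pos] == '\0')
		return (l->type);
	if (l->str[l->pos] == '=' && l->pos > 0 && l->type == T_WORD)
		l->type = T_ASSIGN_WORD;	/* tag inline, no extra state */
	l->pos++;
	return (mini_lex(l));			/* single exit: re-dispatch */
}

int	main(void)
{
	t_mini	word = {"ls", 0, T_WORD};
	t_mini	assign = {"VAR=value", 0, T_WORD};

	printf("%d %d\n", mini_lex(&word), mini_lex(&assign));	/* 1 2 */
	return (0);
}
```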