lexer_assignment_word factoring: it doesn't exist anymore...

This commit is contained in:
Jack Halford 2017-03-17 20:22:09 +01:00
parent 9bda99e139
commit 8427ab28bf
11 changed files with 21 additions and 62 deletions

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/01 12:15:50 by jhalford #+# #+# */ /* Created: 2016/12/01 12:15:50 by jhalford #+# #+# */
/* Updated: 2017/03/17 19:34:31 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:19:18 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -36,7 +36,6 @@ enum e_lexstate
BQUOTE, BQUOTE,
BACKSLASH, BACKSLASH,
CURLY_BRACKETS, CURLY_BRACKETS,
ASSIGNEMENT_WORD,
END, END,
}; };

View file

@ -6,7 +6,7 @@
/* By: jhalford <jhalford@student.42.fr> +#+ +:+ +#+ */ /* By: jhalford <jhalford@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/13 13:09:57 by jhalford #+# #+# */ /* Created: 2016/12/13 13:09:57 by jhalford #+# #+# */
/* Updated: 2017/03/16 15:54:15 by jhalford ### ########.fr */ /* Updated: 2017/03/17 19:51:06 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */

View file

@ -6,7 +6,7 @@
/* By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */ /* By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2017/01/26 00:07:05 by ariard #+# #+# */ /* Created: 2017/01/26 00:07:05 by ariard #+# #+# */
/* Updated: 2017/03/17 19:33:51 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:19:59 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/09 20:39:06 by jhalford #+# #+# */ /* Created: 2017/02/09 20:39:06 by jhalford #+# #+# */
/* Updated: 2017/03/17 00:08:12 by ariard ### ########.fr */ /* Updated: 2017/03/17 20:02:48 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */

View file

@ -1,37 +0,0 @@
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* lexer_assignement_word.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/24 20:28:13 by ariard #+# #+# */
/* Updated: 2017/03/17 19:33:12 by jhalford ### ########.fr */
/* */
/* ************************************************************************** */
#include "minishell.h"
/*
** Lex an assignment word (NAME=value): tag the current token as
** TK_ASSIGNMENT_WORD, consume the character at lexer->pos, then pick the
** next lexer state (global/redir states take priority, otherwise stay in
** ASSIGNEMENT_WORD) and recurse into lexer_lex().
** NOTE(review): this file is deleted by the commit — its logic was folded
** into lexer_word() (see the lexer_word.c hunk in the same diff).
*/
int lexer_assignement_word(t_list **alst, t_lexer *lexer)
{
t_token *token;
/* NOTE(review): `c` is written below but never read — dead variable. */
char c;
token = (*alst)->content;
/* if (token->type != TK_WORD && token->type != TK_ASSIGNMENT_WORD) */
/* { */
/* token_append(token, lexer, 0, 0); */
/* lexer->pos++; */
/* return (lexer_lex(alst, lexer)); */
/* } */
token->type = TK_ASSIGNMENT_WORD;
/* Append the current character to the token, then advance the cursor. */
token_append(token, lexer, 0, 0);
lexer->pos++;
c = lexer->str[lexer->pos];
/* Both helpers return 0 (falsy) when no state transition applies, so the
** assignment-in-condition doubles as the dispatch test. */
if ((lexer->state = get_state_global(lexer)))
return (lexer_lex(alst, lexer));
if ((lexer->state = get_state_redir(lexer)))
return (lexer_lex(alst, lexer));
/* ASSIGNEMENT_WORD [sic] — spelling matches the enum in the header. */
lexer->state = ASSIGNEMENT_WORD;
return(lexer_lex(alst, lexer));
}

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/03 11:56:49 by jhalford #+# #+# */ /* Created: 2016/12/03 11:56:49 by jhalford #+# #+# */
/* Updated: 2017/03/16 22:36:31 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:17:41 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -17,7 +17,7 @@ int lexer_backslash(t_list **alst, t_lexer *lexer)
t_token *token; t_token *token;
token = (*alst)->content; token = (*alst)->content;
token->type = TK_WORD; token->type = token->type ? token->type : TK_WORD;
lexer->pos++; lexer->pos++;
lexer->state = WORD; lexer->state = WORD;
if (lexer->str[lexer->pos] == 0) if (lexer->str[lexer->pos] == 0)

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/09 22:03:48 by jhalford #+# #+# */ /* Created: 2017/02/09 22:03:48 by jhalford #+# #+# */
/* Updated: 2017/03/16 22:30:59 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:07:57 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -18,7 +18,7 @@ int lexer_bquote(t_list **alst, t_lexer *lexer)
int back; int back;
token = (*alst)->content; token = (*alst)->content;
token->type = TK_WORD; token->type = token->type ? token->type : TK_WORD;
back = 0; back = 0;
if (lexer->str[lexer->pos] == '`') if (lexer->str[lexer->pos] == '`')
{ {

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/11/28 18:36:58 by jhalford #+# #+# */ /* Created: 2016/11/28 18:36:58 by jhalford #+# #+# */
/* Updated: 2017/03/17 19:27:42 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:08:02 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -17,7 +17,7 @@ int lexer_dquote(t_list **alst, t_lexer *lexer)
t_token *token; t_token *token;
token = (*alst)->content; token = (*alst)->content;
token->type = TK_WORD; token->type = token->type ? token->type : TK_WORD;
if (lexer->str[lexer->pos] == '"') if (lexer->str[lexer->pos] == '"')
{ {
if (get_lexer_stack(*lexer) == DQUOTE && (lexer->state = WORD)) if (get_lexer_stack(*lexer) == DQUOTE && (lexer->state = WORD))

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/09 17:08:51 by jhalford #+# #+# */ /* Created: 2017/02/09 17:08:51 by jhalford #+# #+# */
/* Updated: 2017/03/17 00:09:05 by ariard ### ########.fr */ /* Updated: 2017/03/17 20:21:33 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -29,7 +29,6 @@ int (*g_lexer[])(t_list **alst, t_lexer *lexer) =
&lexer_bquote, &lexer_bquote,
&lexer_backslash, &lexer_backslash,
&lexer_curly_braces, &lexer_curly_braces,
&lexer_assignement_word,
&lexer_end, &lexer_end,
}; };

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/03 12:07:08 by jhalford #+# #+# */ /* Created: 2016/12/03 12:07:08 by jhalford #+# #+# */
/* Updated: 2017/02/17 15:28:13 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:16:30 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -17,7 +17,7 @@ int lexer_quote(t_list **alst, t_lexer *lexer)
t_token *token; t_token *token;
token = (*alst)->content; token = (*alst)->content;
token->type = TK_WORD; token->type = token->type ? token->type : TK_WORD;
if (lexer->str[lexer->pos] == '\'') if (lexer->str[lexer->pos] == '\'')
{ {
lexer->pos++; lexer->pos++;

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */ /* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/03 12:07:11 by jhalford #+# #+# */ /* Created: 2016/12/03 12:07:11 by jhalford #+# #+# */
/* Updated: 2017/03/17 19:33:42 by jhalford ### ########.fr */ /* Updated: 2017/03/17 20:17:23 by jhalford ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@ -18,23 +18,21 @@ int lexer_word(t_list **alst, t_lexer *lexer)
t_lexstate state; t_lexstate state;
token = (*alst)->content; token = (*alst)->content;
token->type = TK_WORD; token->type = token->type ? token->type : TK_WORD;
if ((state = get_state_global(lexer)) if ((state = get_state_global(lexer))
|| (state = get_state_redir(lexer))) || (state = get_state_redir(lexer)))
{
lexer->state = state; lexer->state = state;
return (lexer_lex(alst, lexer)); else if (lexer->str[lexer->pos] == '='
}
if (lexer->str[lexer->pos] == '='
&& word_is_assignment((char *[]){token->data, (char *)token->esc})) && word_is_assignment((char *[]){token->data, (char *)token->esc}))
{ {
lexer->state = ASSIGNEMENT_WORD; token->type = TK_ASSIGNMENT_WORD;
return (lexer_lex(alst, lexer)); token_append(token, lexer, 0, 0);
lexer->pos++;
} }
else else
{ {
token_append(token, lexer, 0, 0); token_append(token, lexer, 0, 0);
lexer->pos++; lexer->pos++;
} }
return (lexer_lex(alst, lexer)); return (lexer_lex(alst, lexer));
} }