bang stories

This commit is contained in:
Antoine Riard 2017-03-27 19:55:56 +02:00
parent c601d818a2
commit 9dfb84bfcf
10 changed files with 109 additions and 127 deletions

View file

@ -1,4 +1,4 @@
.x........................xxxx....x.
.x..........................xx....x.
----------------------------------------------------------------
@ -9,17 +9,16 @@
Before test:
 01: rm -rf "./size"
02: rm -rf "TOKEN201703270303"
03: echo '^'$(echo TOKEN201703270303_FILE_TOKEN201703270303_STDOUT | wc -c)'$' > "./size"
02: rm -rf "TOKEN201703271859"
03: echo '^'$(echo TOKEN201703271859_FILE_TOKEN201703271859_STDOUT | wc -c)'$' > "./size"
STDIN:
 01: mkdir TOKEN201703270303 ; cd TOKEN201703270303 ; touch TOKEN201703270303_FILE ; ls -1 ; ls | cat | wc -c > TOKEN201703270303_STDOUT ; cat TOKEN201703270303_STDOUT
 01: mkdir TOKEN201703271859 ; cd TOKEN201703271859 ; touch TOKEN201703271859_FILE ; ls -1 ; ls | cat | wc -c > TOKEN201703271859_STDOUT ; cat TOKEN201703271859_STDOUT
STDOUT:
 SUCCESS expected_to match_regex `TOKEN201703270303_FILE$`
 FAILURE expected_to match_regex `TOKEN201703271859_FILE$`
 FAILURE expected_to match_each_regex_of_file `./size`
 01: TOKEN201703270303_FILE
02: 23
 (no output)
STDERR:
 SUCCESS expected_to be_empty
@ -27,62 +26,6 @@
----------------------------------------------------------------
21sh/redirections/outputs/truncating/multiple/002-together-stdout-first (FAILED)
Description:
 A right redirection can be associated with both outputs by using `M>&N`, which means `redirect M to where N is redirected`.
In this test the standard output is specified first.
Before test:
 01: rm -f new_file_stderr_and_stdout
STDIN:
 01: ./write_on_stdout_and_stderr TOKEN201703270303_1 TOKEN201703270303_2 1>new_file_stderr_and_stdout 2>&1
STDOUT:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 (no output)
STDERR:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 (no output)
MISC:
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_1$`
 SUCCESS expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_2$`
----------------------------------------------------------------
21sh/redirections/outputs/truncating/multiple/003-together-stderr-first (FAILED)
Description:
 A right redirection can be associated with both outputs by using `M>&N`, which means `redirect M to where N is redirected`.
In this test the standard error is specified first.
Before test:
 01: rm -f new_file_stderr_and_stdout
STDIN:
 01: ./write_on_stdout_and_stderr TOKEN201703270303_1 TOKEN201703270303_2 2>new_file_stderr_and_stdout 1>&2
STDOUT:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 (no output)
STDERR:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 (no output)
MISC:
 SUCCESS expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_2$`
----------------------------------------------------------------
21sh/redirections/outputs/truncating/multiple/004-together (FAILED)
Description:
@ -92,21 +35,21 @@
 01: rm -f "new_file_stderr_and_stdout"
STDIN:
 01: ./write_on_stdout_and_stderr TOKEN201703270303_1 TOKEN201703270303_2 &>new_file_stderr_and_stdout
 01: ./write_on_stdout_and_stderr TOKEN201703271859_1 TOKEN201703271859_2 &>new_file_stderr_and_stdout
STDOUT:
 FAILURE expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 01: TOKEN201703270303_1
 FAILURE expected_to_not match_regex `TOKEN201703271859_1`
 SUCCESS expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_1
STDERR:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 FAILURE expected_to_not match_regex `TOKEN201703270303_2`
 01: TOKEN201703270303_2
 SUCCESS expected_to_not match_regex `TOKEN201703271859_1`
 FAILURE expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_2
MISC:
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_2$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_2$`
----------------------------------------------------------------
@ -120,21 +63,21 @@
 01: rm -f new_file_stderr_and_stdout
STDIN:
 01: ./write_on_stdout_and_stderr TOKEN201703270303_1 TOKEN201703270303_2 &> new_file_stderr_and_stdout
 01: ./write_on_stdout_and_stderr TOKEN201703271859_1 TOKEN201703271859_2 &> new_file_stderr_and_stdout
STDOUT:
 FAILURE expected_to_not match_regex `TOKEN201703270303_1`
 SUCCESS expected_to_not match_regex `TOKEN201703270303_2`
 01: TOKEN201703270303_1
 FAILURE expected_to_not match_regex `TOKEN201703271859_1`
 SUCCESS expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_1
STDERR:
 SUCCESS expected_to_not match_regex `TOKEN201703270303_1`
 FAILURE expected_to_not match_regex `TOKEN201703270303_2`
 01: TOKEN201703270303_2
 SUCCESS expected_to_not match_regex `TOKEN201703271859_1`
 FAILURE expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_2
MISC:
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703270303_2$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_2$`
----------------------------------------------------------------
@ -144,7 +87,7 @@
 The purpose of this test is to check that using the semicolon separator `;` with empty commands results in error.
STDIN:
 01: ./write_on_stdout TOKEN201703270303 ; ; ./exit_with_status 42
 01: ./write_on_stdout TOKEN201703271859 ; ; ./exit_with_status 42
STDOUT:
 SUCCESS expected_to be_empty
@ -160,5 +103,57 @@
 FAILURE expected_to_not exit_with_status `0`
Total tests: 36
Total failed tests: 6
Total failed tests: 4
Total pending tests: 0
(FAILED)
Description:
 A right redirection can be associated with both outputs by using `&>...`, which means `redirect stdout and stderr to ...`.
In this test, we specify the file name in a separate field.
Before test:
 01: rm -f new_file_stderr_and_stdout
STDIN:
 01: ./write_on_stdout_and_stderr TOKEN201703271859_1 TOKEN201703271859_2 &> new_file_stderr_and_stdout
STDOUT:
 FAILURE expected_to_not match_regex `TOKEN201703271859_1`
 SUCCESS expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_1
STDERR:
 SUCCESS expected_to_not match_regex `TOKEN201703271859_1`
 FAILURE expected_to_not match_regex `TOKEN201703271859_2`
 01: TOKEN201703271859_2
MISC:
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_1$`
 FAILURE expected_to create_file `new_file_stderr_and_stdout` matching_regex `TOKEN201703271859_2$`
----------------------------------------------------------------
21sh/separators/semicolon/003-parse-error-empty-inline-command (FAILED)
Description:
 The purpose of this test is to check that using the semicolon separator `;` with empty commands results in error.
STDIN:
 01: ./write_on_stdout TOKEN201703271859 ; ; ./exit_with_status 42
STDOUT:
 SUCCESS expected_to be_empty
 (no output)
STDERR:
 SUCCESS expected_to_not be_empty
 SUCCESS might match_regex `([Ss]yntax|[Pp]arse) error`
 01: syntax error near unexpected token `;'
MISC:
 SUCCESS expected_to_not exit_with_status `42`
 FAILURE expected_to_not exit_with_status `0`
Total tests: 36
Total failed tests: 4
Total pending tests: 0

View file

@ -226,7 +226,6 @@ lexer/insert_newline.c\
lexer/isrw_delim.c\
lexer/keep_last_type.c\
lexer/lexer_backslash.c\
lexer/lexer_bang.c\
lexer/lexer_bquote.c\
lexer/lexer_curly_braces.c\
lexer/lexer_default.c\

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/01 12:15:50 by jhalford #+# #+# */
/* Updated: 2017/03/27 18:17:39 by jhalford ### ########.fr */
/* Updated: 2017/03/27 19:19:22 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
@ -24,7 +24,6 @@ enum e_lexstate
HEREDOC,
NEWLINE,
DELIM,
BANG,
SEP,
WORD,
NUMBER,
@ -100,7 +99,6 @@ int lexer_default(t_list **alst, t_lexer *lexer);
int lexer_newline(t_list **alst, t_lexer *lexer);
int lexer_heredoc(t_list **alst, t_lexer *lexer);
int lexer_delim(t_list **alst, t_lexer *lexer);
int lexer_bang(t_list **alst, t_lexer *lexer);
int lexer_sep(t_list **alst, t_lexer *lexer);
int lexer_word(t_list **alst, t_lexer *lexer);
int lexer_number(t_list **alst, t_lexer *lexer);

12
42sh/log Normal file
View file

@ -0,0 +1,12 @@
Missing argument: specify the binary to test
Total tests: 0
Total failed tests: 0
Total pending tests: 0
Missing argument: specify the binary to test
Total tests: 0
Total failed tests: 0
Total pending tests: 0

View file

@ -6,7 +6,7 @@
/* By: ariard <ariard@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/01/26 00:07:05 by ariard #+# #+# */
/* Updated: 2017/03/24 15:01:55 by ariard ### ########.fr */
/* Updated: 2017/03/27 19:49:31 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
@ -60,6 +60,12 @@ static int match_words(t_token *token)
return (0);
}
/*
** Returns non-zero when the token preceding a `!` makes the bang behave as
** an ordinary word: the previous token exists and is not a command separator
** (`;`, newline, `&`). NOTE(review): assumes TK_SEMI/TK_NEWLINE/TK_AMP are
** the only separator types relevant here — confirm against the token enum.
*/
static int	is_bang(t_token *token)
{
	if (!token)
		return (0);
	if (token->type == TK_SEMI || token->type == TK_NEWLINE
		|| token->type == TK_AMP)
		return (0);
	return (1);
}
int get_reserved_words(t_list *temp)
{
t_token *token;
@ -73,6 +79,11 @@ int get_reserved_words(t_list *temp)
token = temp->content;
if (recognization_rvwords(pv_tk, ante_token))
match_words(token);
if (token && token->type == TK_BANG && is_bang(pv_tk))
{
DG("token type is %s", read_state(token->type));
token->type = TK_WORD;
}
if (ante_token && (ante_token->type == TK_CASE
|| ante_token->type == TK_FOR)
&& ft_strncmp(token->data, "in", 2) == 0)

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/09 20:39:06 by jhalford #+# #+# */
/* Updated: 2017/03/27 18:39:55 by ariard ### ########.fr */
/* Updated: 2017/03/27 19:18:47 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
@ -24,8 +24,7 @@ t_lexstate get_state_global(t_lexer *lexer)
cl = lexer->pos ? lexer->str[lexer->pos - 1] : 0;
ret = 0;
if ((ft_is_delim(c) && (ret = DELIM))
|| ((c == '&' || c == ';' || c == '|') && (ret = SEP))
|| ((c == '!') && (ret = BANG))
|| ((c == '&' || c == ';' || c == '|' || c == '!') && (ret = SEP))
|| ((c == '\\') && (ret = BACKSLASH))
|| ((c == '\n') && (ret = NEWLINE))
|| ((c == '\'') && (ret = QUOTE))

View file

@ -1,32 +0,0 @@
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* lexer_bang.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/03/27 18:12:03 by jhalford #+# #+# */
/* Updated: 2017/03/27 18:53:10 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
#include "minishell.h"
int lexer_bang(t_list **alst, t_lexer *lexer)
{
t_token *token;
token = (*alst)->content;
if (!token->type || token->type == TK_SEMI || token->type == TK_NEWLINE
|| token->type == TK_AMP)
{
token->type = TK_BANG;
lexer->state = DEFAULT;
lexer->pos++;
return (lexer_lex(&(*alst)->next, lexer));
}
token->type = TK_WORD;
lexer->state = WORD;
lexer->pos++;
return (lexer_lex(alst, lexer));
}

View file

@ -6,7 +6,7 @@
/* By: jhalford <jhalford@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/09 17:08:51 by jhalford #+# #+# */
/* Updated: 2017/03/27 18:13:01 by jhalford ### ########.fr */
/* Updated: 2017/03/27 19:29:37 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
@ -19,7 +19,6 @@ int (*g_lexer[])(t_list **alst, t_lexer *lexer) =
&lexer_heredoc,
&lexer_newline,
&lexer_delim,
&lexer_bang,
&lexer_sep,
&lexer_word,
&lexer_number,

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2016/11/30 16:29:57 by jhalford #+# #+# */
/* Updated: 2017/03/27 18:11:51 by jhalford ### ########.fr */
/* Updated: 2017/03/27 19:53:05 by ariard ### ########.fr */
/* */
/* ************************************************************************** */
@ -23,6 +23,7 @@ int lexer_sep(t_list **alst, t_lexer *lexer)
if (token->type)
return (lexer_lex(&(*alst)->next, lexer));
c = lexer->str[lexer->pos];
token_append(token, lexer, 0, 0);
lexer->pos++;
cn = lexer->str[lexer->pos];
if (c == '&')

View file

@ -6,7 +6,7 @@
/* By: jhalford <jack@crans.org> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2016/12/03 12:07:11 by jhalford #+# #+# */
/* Updated: 2017/03/23 03:39:38 by wescande ### ########.fr */
/* Updated: 2017/03/27 19:35:55 by ariard ### ########.fr */
/* */
/* ************************************************************************** */