refactoring lexing
This commit is contained in:
@@ -6,7 +6,7 @@
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/10/08 02:59:58 by lperrey #+# #+# */
|
||||
/* Updated: 2021/11/30 13:12:04 by lperrey ### ########.fr */
|
||||
/* Updated: 2021/12/01 14:42:38 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
@@ -32,7 +32,7 @@ void shell_loop(t_all *c);
|
||||
void shell_script(t_all *c);
|
||||
|
||||
// Lexer
|
||||
t_token *input_to_tokens(char *input);
|
||||
t_token *lexing(char *input);
|
||||
|
||||
// Parser
|
||||
t_cmd **parsing(t_token *token_list);
|
||||
@@ -81,7 +81,7 @@ char **ft_dup_2d_char_arr(char **ptr);
|
||||
void *ft_resize_2d_arr(void *ptr, size_t add_nbr);
|
||||
void print_matrix(char **matrix, char *sep);
|
||||
t_list *ft_lstbeforelast(t_list *lst);
|
||||
t_list *ft_lstnew_generic(size_t lst_sizse, size_t content_size);
|
||||
void *ft_lstnew_generic(size_t lst_size, size_t content_size);
|
||||
typedef void *(*t_dup_f)(void *);
|
||||
void *ft_dup_2d_arr(void *ptr, void *(*dup_func)(void *));
|
||||
void ft_perror_io(char *err_str, char *io_file);
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/10/08 09:25:35 by lperrey #+# #+# */
|
||||
/* Updated: 2021/11/30 13:11:48 by lperrey ### ########.fr */
|
||||
/* Updated: 2021/11/30 18:51:00 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
@@ -175,7 +175,8 @@ t_list *ft_lstbeforelast(t_list *lst)
|
||||
return (lst);
|
||||
}
|
||||
|
||||
t_list *ft_lstnew_generic(size_t lst_size, size_t content_size)
|
||||
/* if "content_size == 0", return lst with "lst->content == NULL" */
|
||||
void *ft_lstnew_generic(size_t lst_size, size_t content_size)
|
||||
{
|
||||
t_list *elem;
|
||||
void *content;
|
||||
|
||||
@@ -6,45 +6,34 @@
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
|
||||
/* Updated: 2021/11/26 21:02:10 by lperrey ### ########.fr */
|
||||
/* Updated: 2021/12/01 15:48:05 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
#include "minishell.h"
|
||||
|
||||
int check_operators(t_token *t, char *input, int *i, int *t_i);
|
||||
int check_operators(t_token *t, char *input, int *i, int *t_i);
|
||||
|
||||
static int quoting(int *quotes_state, char *input, int *i);
|
||||
|
||||
int fill_token(t_token *t, char *input, int *i, int *t_i)
|
||||
{
|
||||
static int in_quotes = 0;
|
||||
static int quotes_state = 0;
|
||||
|
||||
// operators
|
||||
if (!in_quotes)
|
||||
if (!quotes_state)
|
||||
{
|
||||
if (check_operators(t, input, i, t_i) == DELIMITE_TOKEN)
|
||||
return (DELIMITE_TOKEN);
|
||||
}
|
||||
// quoting
|
||||
if (input[*i] == '\'' && in_quotes != IN_DQUOTES)
|
||||
	if (quoting(&quotes_state, input, i))
|
||||
{
|
||||
t->content[(*t_i)++] = input[(*i)++];
|
||||
if (in_quotes == IN_QUOTES)
|
||||
in_quotes = 0;
|
||||
else if (ft_strchr(&input[*i], '\'')) // if closed quotes
|
||||
in_quotes = IN_QUOTES;
|
||||
return (CONTINUE_TOKEN);
|
||||
}
|
||||
else if (input[*i] == '\"' && in_quotes != IN_QUOTES)
|
||||
{
|
||||
t->content[(*t_i)++] = input[(*i)++];
|
||||
if (in_quotes == IN_DQUOTES)
|
||||
in_quotes = 0;
|
||||
else if (ft_strchr(&input[*i], '\"')) // if closed dquotes
|
||||
in_quotes = IN_DQUOTES;
|
||||
return (CONTINUE_TOKEN);
|
||||
}
|
||||
// blanks
|
||||
if (!in_quotes && (input[*i] == ' ' || input[*i] == '\t'))
|
||||
if (!quotes_state && (input[*i] == ' ' || input[*i] == '\t'))
|
||||
{
|
||||
while (input[*i] == ' ' || input[*i] == '\t')
|
||||
(*i)++;
|
||||
@@ -55,17 +44,23 @@ int fill_token(t_token *t, char *input, int *i, int *t_i)
|
||||
return (CONTINUE_TOKEN);
|
||||
}
|
||||
|
||||
/*
|
||||
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_03
|
||||
1 - OK
|
||||
2 - OK
|
||||
3 - OK
|
||||
4 - OK
|
||||
5 - OK / SEMI-OSEF
|
||||
6 - OK
|
||||
7 - OK
|
||||
8 - OK
|
||||
9 - OSEF
|
||||
10 - OK
|
||||
|
||||
*/
|
||||
static int quoting(int *quotes_state, char *input, int *i)
|
||||
{
|
||||
if (input[*i] == '\'' && *quotes_state != IN_DQUOTES)
|
||||
{
|
||||
if (*quotes_state == IN_QUOTES)
|
||||
*quotes_state = 0;
|
||||
else if (ft_strchr(&input[*i + 1], '\'')) // if closed quotes
|
||||
*quotes_state = IN_QUOTES;
|
||||
return (CONTINUE_TOKEN);
|
||||
}
|
||||
else if (input[*i] == '\"' && *quotes_state != IN_QUOTES)
|
||||
{
|
||||
if (*quotes_state == IN_DQUOTES)
|
||||
*quotes_state = 0;
|
||||
else if (ft_strchr(&input[*i + 1], '\"')) // if closed dquotes
|
||||
*quotes_state = IN_DQUOTES;
|
||||
return (CONTINUE_TOKEN);
|
||||
}
|
||||
return (0);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
|
||||
/* Updated: 2021/11/07 03:18:38 by lperrey ### ########.fr */
|
||||
/* Updated: 2021/12/01 14:54:22 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
@@ -14,19 +14,18 @@
|
||||
|
||||
int fill_token(t_token *t, char *input, int *i, int *t_i);
|
||||
|
||||
static t_token *alloc_token(size_t content_len); // TODO, a remplacer par ft_lstnew_generic()
|
||||
static int tokenize_input(t_token *t, char *input, size_t input_len);
|
||||
|
||||
t_token *input_to_tokens(char *input)
|
||||
t_token *lexing(char *input)
|
||||
{
|
||||
t_token *t_head;
|
||||
t_token *t_tmp;
|
||||
size_t input_len;
|
||||
|
||||
input_len = ft_strlen(input);
|
||||
t_head = alloc_token(input_len);
|
||||
t_head = ft_lstnew_generic(sizeof(t_token), input_len + 1);
|
||||
if (!t_head)
|
||||
return (ft_retp_perror(NULL, "alloc_token() error"));
|
||||
return (ft_retp_perror(NULL, "ft_lstnew_generic() error"));
|
||||
if (!tokenize_input(t_head, input, input_len))
|
||||
return (ft_lstclear((t_list **)&t_head, free));
|
||||
t_tmp = (t_token *)ft_lstbeforelast((t_list *)t_head);
|
||||
@@ -38,19 +37,6 @@ t_token *input_to_tokens(char *input)
|
||||
return (t_head);
|
||||
}
|
||||
|
||||
static t_token *alloc_token(size_t content_len)
|
||||
{
|
||||
t_token *token;
|
||||
|
||||
token = ft_calloc(1, sizeof (*token));
|
||||
if (!token)
|
||||
return (NULL);
|
||||
token->content = ft_calloc(content_len + 1, 1);
|
||||
if (!token->content)
|
||||
return (ft_retp_free(NULL, token, free));
|
||||
return (token);
|
||||
}
|
||||
|
||||
static int tokenize_input(t_token *t, char *input, size_t input_len)
|
||||
{
|
||||
int i;
|
||||
@@ -64,9 +50,9 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
|
||||
{
|
||||
if (!t->id)
|
||||
t->id = T_WORD;
|
||||
t->next = alloc_token(input_len - i);
|
||||
t->next = ft_lstnew_generic(sizeof(t_token), input_len + 1 - i);
|
||||
if (!t->next)
|
||||
return (ft_reti_perror(0, "alloc_token() error"));
|
||||
return (ft_reti_perror(0, "ft_lstnew_generic() error"));
|
||||
t = t->next;
|
||||
t_i = 0;
|
||||
}
|
||||
@@ -75,3 +61,8 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
|
||||
t->id = T_WORD;
|
||||
return (1);
|
||||
}
|
||||
|
||||
/*
|
||||
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
|
||||
#tag_18_03
|
||||
*/
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/10/04 05:59:26 by lperrey #+# #+# */
|
||||
/* Updated: 2021/11/30 13:30:52 by lperrey ### ########.fr */
|
||||
/* Updated: 2021/12/01 14:45:04 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
@@ -27,17 +27,14 @@ void shell_loop(t_all *c)
|
||||
if (line_input && *line_input)
|
||||
{
|
||||
add_history(line_input);
|
||||
// Lexing
|
||||
c->token_list = input_to_tokens(line_input);
|
||||
c->token_list = lexing(line_input);
|
||||
ft_free_null(&line_input);
|
||||
if (!c->token_list)
|
||||
continue ;
|
||||
// Parsing
|
||||
c->pipeline = parsing(c->token_list);
|
||||
ft_lstclear((t_list **)&c->token_list, free);
|
||||
if (!c->pipeline)
|
||||
continue ;
|
||||
// Exec Pipeline
|
||||
exec_cmd_line(c);
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user