refactoring lexing

This commit is contained in:
LuckyLaszlo
2021-12-01 16:00:36 +01:00
parent ffd06c4b95
commit 025ef76c01
5 changed files with 47 additions and 63 deletions

View File

@@ -6,7 +6,7 @@
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/08 09:25:35 by lperrey #+# #+# */
/* Updated: 2021/11/30 13:11:48 by lperrey ### ########.fr */
/* Updated: 2021/11/30 18:51:00 by lperrey ### ########.fr */
/* */
/* ************************************************************************** */
@@ -175,7 +175,8 @@ t_list *ft_lstbeforelast(t_list *lst)
return (lst);
}
t_list *ft_lstnew_generic(size_t lst_size, size_t content_size)
/* if "content_size == 0", return lst with "lst->content == NULL" */
void *ft_lstnew_generic(size_t lst_size, size_t content_size)
{
t_list *elem;
void *content;

View File

@@ -6,45 +6,34 @@
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
/* Updated: 2021/11/26 21:02:10 by lperrey ### ########.fr */
/* Updated: 2021/12/01 15:48:05 by lperrey ### ########.fr */
/* */
/* ************************************************************************** */
#include "minishell.h"
int check_operators(t_token *t, char *input, int *i, int *t_i);
int check_operators(t_token *t, char *input, int *i, int *t_i);
static int quoting(int *quotes_state, char *input, int *i);
int fill_token(t_token *t, char *input, int *i, int *t_i)
{
static int in_quotes = 0;
static int quotes_state = 0;
// operators
if (!in_quotes)
if (!quotes_state)
{
if (check_operators(t, input, i, t_i) == DELIMITE_TOKEN)
return (DELIMITE_TOKEN);
}
// quoting
if (input[*i] == '\'' && in_quotes != IN_DQUOTES)
if (quoting(&quotes_state, input, i))
{
t->content[(*t_i)++] = input[(*i)++];
if (in_quotes == IN_QUOTES)
in_quotes = 0;
else if (ft_strchr(&input[*i], '\'')) // if closed quotes
in_quotes = IN_QUOTES;
return (CONTINUE_TOKEN);
}
else if (input[*i] == '\"' && in_quotes != IN_QUOTES)
{
t->content[(*t_i)++] = input[(*i)++];
if (in_quotes == IN_DQUOTES)
in_quotes = 0;
else if (ft_strchr(&input[*i], '\"')) // if closed dquotes
in_quotes = IN_DQUOTES;
return (CONTINUE_TOKEN);
}
// blanks
if (!in_quotes && (input[*i] == ' ' || input[*i] == '\t'))
if (!quotes_state && (input[*i] == ' ' || input[*i] == '\t'))
{
while (input[*i] == ' ' || input[*i] == '\t')
(*i)++;
@@ -55,17 +44,23 @@ int fill_token(t_token *t, char *input, int *i, int *t_i)
return (CONTINUE_TOKEN);
}
/*
** Token Recognition rules, implemented against the POSIX specification:
** https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_03
** Status per rule: 1-4, 6-8 and 10 are handled; rule 5 is handled but
** only partially relevant here; rule 9 does not apply to this shell.
*/
/*
** Update the lexer's quote state for the character at input[*i].
** A single quote toggles IN_QUOTES (ignored while inside double quotes)
** and a double quote toggles IN_DQUOTES (ignored while inside single
** quotes). A quote only opens a quoted section if a matching closing
** quote exists later in the input (ft_strchr lookahead); otherwise the
** state is left untouched and the character is treated as a literal.
** Returns CONTINUE_TOKEN when the current character is a quote the
** caller should consume into the token, 0 otherwise.
*/
static int	quoting(int *quotes_state, char *input, int *i)
{
	char	cur;

	cur = input[*i];
	if (cur == '\'' && *quotes_state != IN_DQUOTES)
	{
		if (*quotes_state == IN_QUOTES)
			*quotes_state = 0;
		else if (ft_strchr(&input[*i + 1], '\''))
			*quotes_state = IN_QUOTES;
		return (CONTINUE_TOKEN);
	}
	if (cur == '\"' && *quotes_state != IN_QUOTES)
	{
		if (*quotes_state == IN_DQUOTES)
			*quotes_state = 0;
		else if (ft_strchr(&input[*i + 1], '\"'))
			*quotes_state = IN_DQUOTES;
		return (CONTINUE_TOKEN);
	}
	return (0);
}

View File

@@ -6,7 +6,7 @@
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
/* Updated: 2021/11/07 03:18:38 by lperrey ### ########.fr */
/* Updated: 2021/12/01 14:54:22 by lperrey ### ########.fr */
/* */
/* ************************************************************************** */
@@ -14,19 +14,18 @@
int fill_token(t_token *t, char *input, int *i, int *t_i);
static t_token *alloc_token(size_t content_len); // TODO, a remplacer par ft_lstnew_generic()
static int tokenize_input(t_token *t, char *input, size_t input_len);
t_token *input_to_tokens(char *input)
t_token *lexing(char *input)
{
t_token *t_head;
t_token *t_tmp;
size_t input_len;
input_len = ft_strlen(input);
t_head = alloc_token(input_len);
t_head = ft_lstnew_generic(sizeof(t_token), input_len + 1);
if (!t_head)
return (ft_retp_perror(NULL, "alloc_token() error"));
return (ft_retp_perror(NULL, "ft_lstnew_generic() error"));
if (!tokenize_input(t_head, input, input_len))
return (ft_lstclear((t_list **)&t_head, free));
t_tmp = (t_token *)ft_lstbeforelast((t_list *)t_head);
@@ -38,19 +37,6 @@ t_token *input_to_tokens(char *input)
return (t_head);
}
/*
** Allocate one zero-initialized token whose content buffer can hold
** content_len characters plus the terminating NUL.
** On failure returns NULL; if the content buffer cannot be allocated,
** the partially built token is freed before returning (via
** ft_retp_free, which frees its second argument and returns the first).
*/
static t_token	*alloc_token(size_t content_len)
{
	t_token	*new;

	new = ft_calloc(1, sizeof (*new));
	if (new == NULL)
		return (NULL);
	new->content = ft_calloc(content_len + 1, 1);
	if (new->content == NULL)
		return (ft_retp_free(NULL, new, free));
	return (new);
}
static int tokenize_input(t_token *t, char *input, size_t input_len)
{
int i;
@@ -64,9 +50,9 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
{
if (!t->id)
t->id = T_WORD;
t->next = alloc_token(input_len - i);
t->next = ft_lstnew_generic(sizeof(t_token), input_len + 1 - i);
if (!t->next)
return (ft_reti_perror(0, "alloc_token() error"));
return (ft_reti_perror(0, "ft_lstnew_generic() error"));
t = t->next;
t_i = 0;
}
@@ -75,3 +61,8 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
t->id = T_WORD;
return (1);
}
/*
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
#tag_18_03
*/

View File

@@ -6,7 +6,7 @@
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/04 05:59:26 by lperrey #+# #+# */
/* Updated: 2021/11/30 13:30:52 by lperrey ### ########.fr */
/* Updated: 2021/12/01 14:45:04 by lperrey ### ########.fr */
/* */
/* ************************************************************************** */
@@ -27,17 +27,14 @@ void shell_loop(t_all *c)
if (line_input && *line_input)
{
add_history(line_input);
// Lexing
c->token_list = input_to_tokens(line_input);
c->token_list = lexing(line_input);
ft_free_null(&line_input);
if (!c->token_list)
continue ;
// Parsing
c->pipeline = parsing(c->token_list);
ft_lstclear((t_list **)&c->token_list, free);
if (!c->pipeline)
continue ;
// Exec Pipeline
exec_cmd_line(c);
}
}