refactoring lexing

This commit is contained in:
LuckyLaszlo
2021-12-01 16:00:36 +01:00
parent ffd06c4b95
commit 025ef76c01
5 changed files with 47 additions and 63 deletions

View File

@@ -6,7 +6,7 @@
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
/* Updated: 2021/11/07 03:18:38 by lperrey ### ########.fr */
/* Updated: 2021/12/01 14:54:22 by lperrey ### ########.fr */
/* */
/* ************************************************************************** */
@@ -14,19 +14,18 @@
int fill_token(t_token *t, char *input, int *i, int *t_i);
static t_token *alloc_token(size_t content_len); // TODO, a remplacer par ft_lstnew_generic()
static int tokenize_input(t_token *t, char *input, size_t input_len);
t_token *input_to_tokens(char *input)
t_token *lexing(char *input)
{
t_token *t_head;
t_token *t_tmp;
size_t input_len;
input_len = ft_strlen(input);
t_head = alloc_token(input_len);
t_head = ft_lstnew_generic(sizeof(t_token), input_len + 1);
if (!t_head)
return (ft_retp_perror(NULL, "alloc_token() error"));
return (ft_retp_perror(NULL, "ft_lstnew_generic() error"));
if (!tokenize_input(t_head, input, input_len))
return (ft_lstclear((t_list **)&t_head, free));
t_tmp = (t_token *)ft_lstbeforelast((t_list *)t_head);
@@ -38,19 +37,6 @@ t_token *input_to_tokens(char *input)
return (t_head);
}
/*
** Allocate one zero-initialized token together with a zeroed content
** buffer of content_len + 1 bytes (the extra byte leaves room for a
** terminating NUL). Returns the new token, or NULL on allocation
** failure; if the content buffer cannot be obtained, the partially
** built token is released before returning NULL.
*/
static t_token	*alloc_token(size_t content_len)
{
	t_token	*node;

	node = ft_calloc(1, sizeof (*node));
	if (node == NULL)
		return (NULL);
	node->content = ft_calloc(content_len + 1, 1);
	if (node->content == NULL)
		return (ft_retp_free(NULL, node, free));
	return (node);
}
static int tokenize_input(t_token *t, char *input, size_t input_len)
{
int i;
@@ -64,9 +50,9 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
{
if (!t->id)
t->id = T_WORD;
t->next = alloc_token(input_len - i);
t->next = ft_lstnew_generic(sizeof(t_token), input_len + 1 - i);
if (!t->next)
return (ft_reti_perror(0, "alloc_token() error"));
return (ft_reti_perror(0, "ft_lstnew_generic() error"));
t = t->next;
t_i = 0;
}
@@ -75,3 +61,8 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
t->id = T_WORD;
return (1);
}
/*
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
#tag_18_03
*/