/* ************************************************************************** */
/*                                                                            */
/*                                                        :::      ::::::::   */
/*   lexing.c                                           :+:      :+:    :+:   */
/*                                                    +:+ +:+         +:+     */
/*   By: lperrey                                    +#+  +:+       +#+        */
/*                                                +#+#+#+#+#+   +#+           */
/*   Created: 2021/10/19 08:38:55 by lperrey           #+#    #+#             */
/*   Updated: 2021/10/30 22:37:48 by lperrey          ###   ########.fr       */
/*                                                                            */
/* ************************************************************************** */

#include "minishell.h"

int				fill_token(t_token *t, char *input, int *i, int *t_i);
static t_token	*alloc_token(size_t content_len);
static int		tokenize_input(t_token *t, char *input, size_t input_len);

/*
** Lexes a raw command line into a linked list of t_token nodes.
**
** Returns the list head, or NULL when allocation or tokenizing fails
** (NOTE(review): the error path relies on this project's ft_lstclear
** returning a NULL-compatible value — confirm against the local libft).
** The scan can leave one empty trailing node (id == 0); it is unlinked
** and freed before the list is handed back to the caller.
*/
t_token	*input_to_tokens(char *input)
{
	t_token	*head;
	t_token	*before_last;
	size_t	len;

	len = ft_strlen(input);
	head = alloc_token(len);
	if (!head)
		return (ft_retp_perror(NULL, "alloc_token() error"));
	if (!tokenize_input(head, input, len))
		return (ft_lstclear((t_list **)&head, free));
	/* Drop a leftover empty tail node, if the scan produced one. */
	before_last = (t_token *)ft_lstbeforelast((t_list *)head);
	if (before_last && !before_last->next->id)
	{
		ft_lstdelone((t_list *)before_last->next, free);
		before_last->next = NULL;
	}
	return (head);
}

/*
** Allocates one zeroed token whose content buffer can hold content_len
** characters plus the terminating NUL. Returns NULL on failure (the
** node itself is released when only the content allocation fails).
*/
static t_token	*alloc_token(size_t content_len)
{
	t_token	*node;

	node = ft_calloc(1, sizeof (*node));
	if (!node)
		return (NULL);
	node->content = ft_calloc(content_len + 1, 1);
	if (!node->content)
		return (ft_retp_free(NULL, node, free));
	return (node);
}

/*
** Walks the whole input, letting fill_token (defined elsewhere) consume
** characters into the current node. When fill_token reports a delimiter
** and the current node holds text, the node is sealed (untyped nodes
** default to T_WORD) and a fresh node is chained for the remainder.
** Returns 1 on success, 0 on allocation failure.
**
** NOTE(review): assumes fill_token always advances *pos (otherwise this
** loop would not terminate) — confirm in fill_token's definition.
*/
static int	tokenize_input(t_token *t, char *input, size_t input_len)
{
	int	pos;
	int	tok_len;

	pos = 0;
	tok_len = 0;
	while (input[pos])
	{
		if (fill_token(t, input, &pos, &tok_len) != DELIMITE_TOKEN)
			continue ;
		if (!input[pos] || !tok_len)
			continue ;
		if (!t->id)
			t->id = T_WORD;
		/* Remaining input bounds the next token's content buffer. */
		t->next = alloc_token(input_len - pos);
		if (!t->next)
			return (ft_reti_perror(0, "alloc_token() error"));
		t = t->next;
		tok_len = 0;
	}
	if (!t->id && tok_len)
		t->id = T_WORD;
	return (1);
}