Fix free for final empty token.
This commit is contained in:
@@ -6,7 +6,7 @@
|
|||||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||||
/* +#+#+#+#+#+ +#+ */
|
/* +#+#+#+#+#+ +#+ */
|
||||||
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
|
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
|
||||||
/* Updated: 2021/10/24 19:53:40 by lperrey ### ########.fr */
|
/* Updated: 2021/10/30 12:43:05 by lperrey ### ########.fr */
|
||||||
/* */
|
/* */
|
||||||
/* ************************************************************************** */
|
/* ************************************************************************** */
|
||||||
|
|
||||||
@@ -29,9 +29,19 @@ enum e_fill_token_return
|
|||||||
DELIMITE_TOKEN
|
DELIMITE_TOKEN
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/*
** Returns the node located just before the last node of the list,
** or NULL when the list is empty or holds a single node.
** Used by the tokenizer to drop a trailing empty token.
*/
t_list	*ft_lstbeforelast(t_list *node)
{
	t_list	*prev;

	if (node == NULL || node->next == NULL)
		return (NULL);
	prev = node;
	while (prev->next->next != NULL)
		prev = prev->next;
	return (prev);
}
|
||||||
|
|
||||||
t_token *input_to_tokens(char *input)
|
t_token *input_to_tokens(char *input)
|
||||||
{
|
{
|
||||||
t_token *t_head;
|
t_token *t_head;
|
||||||
|
t_token *t_tmp;
|
||||||
size_t input_len;
|
size_t input_len;
|
||||||
|
|
||||||
input_len = ft_strlen(input);
|
input_len = ft_strlen(input);
|
||||||
@@ -40,11 +50,15 @@ t_token *input_to_tokens(char *input)
|
|||||||
return (ft_retp_perror(NULL, "alloc_token() error"));
|
return (ft_retp_perror(NULL, "alloc_token() error"));
|
||||||
if (!tokenize_input(t_head, input, input_len))
|
if (!tokenize_input(t_head, input, input_len))
|
||||||
return (ft_lstclear((t_list **)&t_head, free));
|
return (ft_lstclear((t_list **)&t_head, free));
|
||||||
|
t_tmp = ft_lstbeforelast((t_list *)t_head);
|
||||||
|
if (t_tmp && !t_tmp->next->id)
|
||||||
|
{
|
||||||
|
ft_lstdelone((t_list *)t_tmp->next, free);
|
||||||
|
t_tmp->next = NULL;
|
||||||
|
}
|
||||||
return (t_head);
|
return (t_head);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO : Fix final space saved after a pipe like in "cmd | "
|
|
||||||
// "cmd | " should behave like "cmd |"
|
|
||||||
static int tokenize_input(t_token *t, char *input, size_t input_len)
|
static int tokenize_input(t_token *t, char *input, size_t input_len)
|
||||||
{
|
{
|
||||||
int i;
|
int i;
|
||||||
@@ -65,10 +79,8 @@ static int tokenize_input(t_token *t, char *input, size_t input_len)
|
|||||||
t_i = 0;
|
t_i = 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!t->id && t_i) // Fix parser syntax, but last elem must still be free
|
if (!t->id && t_i)
|
||||||
t->id = T_WORD;
|
t->id = T_WORD;
|
||||||
/* if (!t->id)
|
|
||||||
t->id = T_WORD; */
|
|
||||||
return (1);
|
return (1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user