/* ************************************************************************** */
|
|
/* */
|
|
/* ::: :::::::: */
|
|
/* lexing.c :+: :+: :+: */
|
|
/* +:+ +:+ +:+ */
|
|
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
|
/* +#+#+#+#+#+ +#+ */
|
|
/* Created: 2021/10/19 08:38:55 by lperrey #+# #+# */
|
|
/* Updated: 2021/12/05 17:11:31 by lperrey ### ########.fr */
|
|
/* */
|
|
/* ************************************************************************** */
|
|
|
|
#include "minishell.h"
|
|
|
|
int fill_token(t_token *t, char *input, int *i, int *t_i);
|
|
|
|
static int tokenize_input(t_token *t, char *input, size_t input_len);
|
|
|
|
t_token *lexing(char *input)
|
|
{
|
|
t_token *t_head;
|
|
t_token *t_tmp;
|
|
size_t input_len;
|
|
|
|
input_len = ft_strlen(input);
|
|
t_head = ft_lstnew_generic(sizeof(t_token), input_len + 1);
|
|
if (!t_head)
|
|
return (ft_retp_perror(NULL, "ft_lstnew_generic() error"));
|
|
if (!tokenize_input(t_head, input, input_len))
|
|
return (ft_lstclear((t_list **)&t_head, free));
|
|
t_tmp = (t_token *)ft_lstbeforelast((t_list *)t_head);
|
|
if (t_tmp && t_tmp->next && !t_tmp->next->id)
|
|
{
|
|
ft_lstdelone((t_list *)t_tmp->next, free);
|
|
t_tmp->next = NULL;
|
|
}
|
|
return (t_head);
|
|
}
|
|
|
|
/*
** Walks `input`, letting fill_token() copy characters into the current
** token node and advance `i`/`t_i`.  When fill_token() reports a
** delimiter and the current node is non-empty, the node is finalized
** (defaulting its id to T_WORD) and a fresh node is appended, sized to
** the remaining input.  Returns 1 on success, 0 on allocation failure.
*/
static int	tokenize_input(t_token *t, char *input, size_t input_len)
{
	int	i;
	int	t_i;

	i = 0;
	t_i = 0;
	while (input[i] != '\0')
	{
		if (fill_token(t, input, &i, &t_i) != DELIMITE_TOKEN)
			continue ;
		/* Delimiter at end of input, or nothing accumulated: no new node. */
		if (input[i] == '\0' || t_i == 0)
			continue ;
		if (t->id == 0)
			t->id = T_WORD;
		t->next = ft_lstnew_generic(sizeof(t_token), input_len + 1 - i);
		if (t->next == NULL)
			return (ft_reti_perror(0, "ft_lstnew_generic() error"));
		t = t->next;
		t_i = 0;
	}
	/* Last node: give it a default id if it actually holds characters. */
	if (t->id == 0 && t_i != 0)
		t->id = T_WORD;
	return (1);
}
|
|
|
|
/*
|
|
2.3 Token Recognition
|
|
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
|
|
#tag_18_03
|
|
*/
|