expand_redirection() : rejoin token list on error
+ words_expansions.c -> expansions.c + generic.c misc
This commit is contained in:
82
srcs/parsing/expansions/expansions.c
Normal file
82
srcs/parsing/expansions/expansions.c
Normal file
@@ -0,0 +1,82 @@
|
||||
/* ************************************************************************** */
|
||||
/* */
|
||||
/* ::: :::::::: */
|
||||
/* expansions.c :+: :+: :+: */
|
||||
/* +:+ +:+ +:+ */
|
||||
/* By: lperrey <lperrey@student.42.fr> +#+ +:+ +#+ */
|
||||
/* +#+#+#+#+#+ +#+ */
|
||||
/* Created: 2021/11/07 02:01:33 by lperrey #+# #+# */
|
||||
/* Updated: 2021/12/11 20:10:37 by lperrey ### ########.fr */
|
||||
/* */
|
||||
/* ************************************************************************** */
|
||||
|
||||
#include "minishell.h"
|
||||
|
||||
t_list *expand_token(char *content);
|
||||
char *rejoin_after_expand(t_list *expand_lst);
|
||||
int new_token_for_each_field(char **fields, t_token *t);
|
||||
|
||||
// 1 - put each piece into an element of a t_list
//     (kept as-is if not expandable, or replaced by the env VARIABLE value)
// 2 - strjoin() everything back together
// 3 - split with a modified ft_split() (one that does not split inside quotes)
// 4 - quote removal: ft_strdup_quotes() applied over the split array
// 5 - create a T_WORD token for each *string of the **split_arr
//     (ft_lstadd_front() on the original token, then set the original token to:
//     t->id = 0 ; free(t->content) ; t->content = NULL ; so it is ignored for the rest of the parsing)
|
||||
|
||||
/*
** Walks the whole token list and runs word expansion on every T_WORD token.
** cmd_idx tracks which pipeline command the current token belongs to
** (it is bumped on every '|' token); tokens of a command already flagged
** with an error are skipped. When token_expansions() fails, the current
** command is marked EXIT_EXPANSION. Because token_expansions() may insert
** new tokens in front of the current one, we remember the token that
** followed it (resume) and advance until we reach it again, so the freshly
** inserted tokens are not re-expanded. Always returns 1.
*/
int	expansions(t_token *t, t_cmd **pipeline)
{
	int		cmd_idx;
	t_token	*resume;

	cmd_idx = 0;
	while (t)
	{
		if (t->id == '|')
			cmd_idx++;
		if (pipeline[cmd_idx]->error || t->id != T_WORD)
		{
			t = t->next;
			continue ;
		}
		resume = t->next;
		if (!token_expansions(t))
			pipeline[cmd_idx]->error = EXIT_EXPANSION;
		while (t != resume)
			t = t->next;
	}
	return (1);
}
|
||||
|
||||
/*
** Expands one T_WORD token in five steps (see the numbered plan above):
**   1. expand_token():  split the content into a t_list of pieces,
**      substituting environment variables where needed
**   2. rejoin_after_expand(): strjoin the pieces back into one string
**   3. ft_split_quotes(): field-split the joined string on spaces,
**      without splitting inside quotes
**   4. ft_dup_2d_arr() + ft_strdup_quotes(): quote removal on each field
**   5. new_token_for_each_field(): emit one T_WORD token per field
** Returns 1 on success, 0 on any failure.
**
** Fix: the original funneled every intermediate through a single `void *tmp`
** with casts on the assignment source — a type-erasing round-trip that hides
** mistakes from the compiler. Each intermediate now has its proper type; the
** call and free() sequence is unchanged.
**
** NOTE(review): ownership of `pieces` is presumably taken by
** rejoin_after_expand(), and ownership of `fields` by
** new_token_for_each_field() — confirm against those helpers, otherwise
** they leak here (including on the step-5 failure path).
*/
int	token_expansions(t_token *t)
{
	t_list	*pieces;
	char	*joined;
	char	**split;
	char	**fields;

	pieces = expand_token(t->content);
	if (!pieces)
		return (0);
	joined = rejoin_after_expand(pieces);
	if (!joined)
		return (0);
	split = ft_split_quotes(joined, ' ');
	free(joined);
	if (!split)
		return (0);
	fields = ft_dup_2d_arr(split, (t_dup_f)ft_strdup_quotes);
	ft_free_2d_arr(split);
	if (!fields)
		return (0);
	if (!new_token_for_each_field(fields, t))
		return (0);
	return (1);
}
|
||||
Reference in New Issue
Block a user