aboutsummaryrefslogtreecommitdiff
path: root/src/lexer
diff options
context:
space:
mode:
authornass1pro <nass1pro@gmail.com>2020-06-12 10:12:03 +0200
committerCharles <sircharlesaze@gmail.com>2020-06-13 11:42:46 +0200
commitc5cde6afd3cecb44bbe9da0c28d970524da80228 (patch)
tree98ec558582ed20a120e13b4a376fd206fb620da0 /src/lexer
parent39c04561ae4956cb836c6117789cbc7926cfbd65 (diff)
downloadminishell-c5cde6afd3cecb44bbe9da0c28d970524da80228.tar.gz
minishell-c5cde6afd3cecb44bbe9da0c28d970524da80228.tar.bz2
minishell-c5cde6afd3cecb44bbe9da0c28d970524da80228.zip
probleme
token ok reste trim and exception ok
Diffstat (limited to 'src/lexer')
-rw-r--r--src/lexer/.DS_Storebin0 -> 6148 bytes
-rw-r--r--src/lexer/lexer.c178
-rw-r--r--src/lexer/lexer_utils.c89
3 files changed, 267 insertions, 0 deletions
diff --git a/src/lexer/.DS_Store b/src/lexer/.DS_Store
new file mode 100644
index 0000000..fcaf8ef
--- /dev/null
+++ b/src/lexer/.DS_Store
Binary files differ
diff --git a/src/lexer/lexer.c b/src/lexer/lexer.c
new file mode 100644
index 0000000..994ea55
--- /dev/null
+++ b/src/lexer/lexer.c
@@ -0,0 +1,178 @@
+
+#include "lexer.h"
+
/*
** Returns the length of the leading chunk of `input` before the next token
** boundary.  Walks forward until it meets:
**   - a separator character (see lexer_sep): if the separator is directly
**     followed by spaces, the run of spaces is also consumed and the index
**     past them is returned; otherwise the separator's own index is returned.
**     NOTE(review): the separator is counted in the first case but not the
**     second — looks inconsistent, confirm intended behaviour.
**   - a quote (' or "): its index is returned (quote handling is done by
**     lexer_verif_entre_cote elsewhere).
**   - a space: the run of spaces is consumed and the index past it returned.
** If none of these occur, the full string length is returned.
*/
int	len_is_not_sep(char *input)
{
	int	i;

	i = -1;
	while (input[++i])
	{
		if (lexer_sep(input[i]))
		{
			/* swallow the spaces that directly follow the separator */
			if (input[i + 1] == ' ')
				while (input[++i] == ' ')
					;
			return (i);
		}
		if (input[i] == '\'' || input[i] == '"')
			return (i);
		if (input[i] == ' ')
		{
			/* swallow the whole run of spaces */
			while (input[++i] == ' ')
				;
			return (i);
		}
	}
	return (i);
}
+
/*
** Measures the length of the next token starting at input[0].
**   - separator: groups a run of identical separator chars (">>", "||", …)
**     plus any trailing spaces, and returns that length.
**     NOTE(review): the run test would also group ";;;" — confirm intended.
**   - quote (' or "): delegates to lexer_verif_entre_cote.
**   - space: returns the length of the space run.
**   - otherwise: delegates to len_is_not_sep for a plain word.
** Returns the number of characters the token occupies in `input`.
*/
int	check_input(char *input)
{
	int	i;

	i = 0;
	if (lexer_sep(input[i]))
	{
		while (input[i] == input[i + 1])
			i++;
		i += lexe_space(&input[i + 1]);
		return (i + 1);
	}
	/* was `input[i] == 39`: 39 is ASCII for the single quote */
	if (input[i] == '\'' || input[i] == '"')
		return (lexer_verif_entre_cote(input, i));
	if (input[i] == ' ')
	{
		while (input[++i] == ' ')
			;
		return (i);
	}
	return (len_is_not_sep(&input[i]));
}
+
+
/*
** Returns the length of the next token at the start of `input`.
** Tries len_is_not_sep first; if that yields 0 (input starts on a
** separator, quote or space boundary) falls back to check_input.
**
** Rewritten without the original `while` loop: every path through the old
** loop body returned on the first iteration, so the loop and the `i` index
** were dead code.  Behaviour is unchanged.
*/
int	check_input_out(char *input)
{
	int	len;

	if (*input == '\0')
		return (0);
	len = len_is_not_sep(input);
	if (len != 0)
		return (len);
	return (check_input(input));
}
+
+t_token *lexer_lst_token_str(char *input, int i, int j)
+{
+ t_token *lst_token;
+
+ if (!(lst_token = malloc(sizeof(t_token) * 1)))
+ return (NULL);
+ lst_token->token = 0;
+ lst_token->value = NULL;
+ if (!(lst_token->value = malloc(sizeof(char) * j + 1)))
+ return(0);
+ if (!(ft_strlcpy(lst_token->value, &input[i], j + 1)))
+ {
+ free(lst_token);
+ return(0);
+ }
+ //printf("%s-\n", lst_token->value);
+ return (lst_token);
+}
+
+enum e_token_tag token_verif_stick(t_token *lst_token)
+{
+ int i;
+
+ i = ft_strlen(lst_token->value);
+
+ if (lst_token->value[i - 1] == ' ')
+ return(lst_token->token);
+ return(lst_token->token | LTAG_STICK);
+}
+
+enum e_token_tag token_str_or_cote(t_token *lst_token)
+{
+ int i;
+
+ i = 0;
+ while(lst_token->value[i] != '\0')
+ {
+ if(lst_token->value[i] == '\'')
+ {
+ lst_token->token = LTAG_STR_SINGLE;
+ return(token_verif_stick(lst_token));
+ }
+ if(lst_token->value[i] == '"')
+ {
+ lst_token->token = LTAG_STR_DOUBLE;
+ return(token_verif_stick(lst_token));
+ }
+ else
+ {
+ lst_token->token = LTAG_STR;
+ return(token_verif_stick(lst_token));
+ }
+ i++;
+ }
+ return(0);
+}
+
+t_token *push_token_enum_and_trim(t_token *lst_token)
+{
+ enum e_token_tag tk;
+
+ tk = ret_token(lst_token->value, 0);
+ if (tk == 0)
+ {
+ lst_token->token = token_str_or_cote(lst_token);
+ }
+ printf("%s-, %d\n",lst_token->value, (int)lst_token->token);
+ return (lst_token);
+}
+
+static t_ftlst *create_token_list(char *input, t_ftlst **lst)
+{
+ t_token *lst_token;
+ t_ftlst *new;
+ int i;
+ int j;
+
+ i = 0;
+ while (i < (int)ft_strlen(input))
+ {
+ j = 0;
+ j += check_input(&input[i]);
+ lst_token = lexer_lst_token_str(input,i,j);
+ lst_token = push_token_enum_and_trim(lst_token);
+ new = ft_lstnew((void *) lst_token);
+ ft_lstpush_back(lst, new);
+ i += j;
+ }
+ return (*lst);
+}
+
+t_ftlst *lexer(char *input)
+{
+ t_ftlst **lst;
+ int i;
+
+ if (!input)
+ return (0);
+ lst = malloc(sizeof(t_ftlst *) * 1);
+ if (!lst)
+ return(0);
+ *lst = create_token_list(input, lst);
+ i = ft_lstsize(*lst);
+ free(lst);
+ return (0);
+}
diff --git a/src/lexer/lexer_utils.c b/src/lexer/lexer_utils.c
new file mode 100644
index 0000000..72d8288
--- /dev/null
+++ b/src/lexer/lexer_utils.c
@@ -0,0 +1,89 @@
+
+#include "lexer.h"
+
+
+
+enum e_token_tag ret_token_sep_redir_append(char *input, int i)
+{
+ if (input[i + 1] == '>')
+ return(LTAG_REDIR_APPEND);
+ return (LTAG_REDIR_OUT);
+
+}
+
+enum e_token_tag ret_token(char *input, int i)
+{
+ if (input[i] == ';')
+ return(LTAG_AND);
+ if (input[i] == '&')
+ return(LTAG_END);
+ if (input[i] == '|' && input[i + 1] == '|')
+ return(LTAG_OR);
+ if(input[i] == '|')
+ return(LTAG_PIPE);
+ if (input[i] == '>')
+ return(ret_token_sep_redir_append(input,i));
+ if (input[i] == '<')
+ return(LTAG_REDIR_IN);
+ if (input[i] == '(')
+ return(LTAG_PARENT_OPEN);
+ if (input[i] == ')')
+ return(LTAG_PARENT_CLOSE);
+ return(0);
+
+}
+
+
/*
** Returns 1 when `input` is one of the shell operator characters
** ";&|><()", 0 otherwise (including for '\0').
*/
int	lexer_sep(char input)
{
	const char	*cursor;

	cursor = ";&|><()";
	while (*cursor != '\0')
	{
		if (*cursor == input)
			return (1);
		cursor++;
	}
	return (0);
}
+
/*
** Counts the run of leading space characters in `input` and returns
** its length (0 when the first character is not a space).
*/
int	lexe_space(char *input)
{
	const char	*end;

	end = input;
	while (*end == ' ')
		end++;
	return ((int)(end - input));
}
+
/*
** Measures a single-quoted token starting at input[i] (an opening ').
** Returns the index one past the closing quote, additionally consuming any
** spaces that directly follow it.  For an unterminated quote, returns the
** index of the terminating '\0' (the original read past the end of the
** string in that case).
**
** Fixes the original's scan loop, which incremented i BEFORE testing for
** the closing quote and therefore never examined the first character after
** the opening quote — an empty pair '' was mis-measured and read out of
** bounds.
*/
static int	lex_verif_simple_cote(char *input, int i)
{
	i++;
	while (input[i] != '\0' && input[i] != '\'')
		i++;
	if (input[i] == '\0')
		return (i);
	while (input[i + 1] == ' ')
		i++;
	return (i + 1);
}
+
/*
** Measures a quoted token starting at input[i].  Single quotes are handled
** by lex_verif_simple_cote; otherwise input[i] is assumed to be '"'.
** Returns the index one past the closing double quote, additionally
** consuming any spaces that directly follow it.  For an unterminated
** quote, returns the index of the terminating '\0' (the original read
** input[i + 1] past the end of the string in that case).
*/
int	lexer_verif_entre_cote(char *input, int i)
{
	if (input[i] == '\'')
		return (lex_verif_simple_cote(input, i));
	i++;
	while (input[i] != '"' && input[i] != '\0')
		++i;
	if (input[i] == '\0')
		return (i);
	while (input[i + 1] == ' ')
		i++;
	return (i + 1);
}