blob: 948afd0d3168ee2cd35d4b8f58abfa134b720d99 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
|
// the lexer acts as a table builder from a csv file,
// parsing individual rows and columns;
// the next step would be building the b-tree
// True when `point` is an ASCII letter or an underscore
// (i.e. a valid identifier-start byte).
internal b32
is_alpha(u8 point)
{
    b32 lower = (point >= 'a' && point <= 'z');
    b32 upper = (point >= 'A' && point <= 'Z');
    return (lower || upper || point == '_');
}
// True when `point` is an ASCII decimal digit '0'..'9'.
internal b32
is_digit(u8 point)
{
    return ('0' <= point && point <= '9');
}
// True when `point` is a letter, underscore, or decimal digit.
internal b32
is_alpha_num(u8 point)
{
    if (is_alpha(point))
    {
        return 1;
    }
    return is_digit(point);
}
// True for the four ASCII whitespace bytes this lexer cares about:
// space, tab, carriage return, and newline.
internal b32
is_whitespace(u8 point)
{
    switch (point)
    {
        case ' ':
        case '\t':
        case '\r':
        case '\n': return 1;
        default:   return 0;
    }
}
// True when `point` is the CSV field delimiter (comma).
internal b32
is_delimiter(u8 point)
{
    return (point == ',') ? 1 : 0;
}
// Scans a CSV buffer and records one string8 slice per field into an
// arena-backed array. Fields end at ',' or '\n'; the slices alias the
// input buffer (no copies are made), so `buffer` must outlive the table.
//
// The header row's tokens are the first tokens emitted, so
// global_table->headers is pointed at the front of the token array.
//
// Returns NULL: row/b-tree construction is a later step (see the file
// comment), and the original contract never returned a token list.
//
// Fixes over the previous version: token start/end pointers now advance
// through the buffer (they previously always pointed at buffer.data and
// `end` could stay NULL, making `end - start` UB); tokens are written by
// index instead of through an uninitialized string8* slot; the header
// pointer no longer aliases a loop-local; the token array is sized for
// the worst case (all delimiters -> size + 1 fields) instead of size/10;
// and the per-byte csv_row allocation and debug printf were removed.
internal token *
tokenize_csv(string8 buffer, csv_table *global_table, mem_arena *arena)
{
    // Reject empty/NULL input up front; the scan assumes at least one byte.
    if (buffer.data == NULL || buffer.size == 0) return NULL;

    // Worst case every byte is a delimiter, yielding size + 1 fields.
    string8 *tokens = PushArray(arena, string8, buffer.size + 1);
    i32 token_count = 0;
    u8 *token_start = buffer.data;
    i32 index = 0;

    // NOTE(review): the original loop also stopped at '\0', so this
    // assumes the loader NUL-terminates the buffer — confirm.
    for (; buffer.data[index] != '\0'; ++index)
    {
        u8 point = buffer.data[index];
        if (!is_delimiter(point) && point != '\n')
        {
            continue;
        }

        // One past the field's last byte; the delimiter itself is excluded.
        u8 *token_end = buffer.data + index;
        tokens[token_count++] = (string8){
            .data = token_start,
            .size = token_end - token_start,
        };
        // Next field begins just after the delimiter/newline.
        token_start = buffer.data + index + 1;
        // NOTE(review): "\r\n" line endings leave a trailing '\r' in the
        // last field of each row — decide whether to strip it here.
    }

    // Flush a final field that was terminated by end-of-buffer rather
    // than by ',' or '\n'.
    if (buffer.data + index > token_start)
    {
        tokens[token_count++] = (string8){
            .data = token_start,
            .size = (buffer.data + index) - token_start,
        };
    }

    // The header row's fields occupy the front of the arena array.
    // NOTE(review): csv_table exposes no visible count field here —
    // confirm how callers learn how many headers/tokens were produced.
    global_table->headers = tokens;

    return NULL;
}
|