/* Parser-tokenizer link implementation */

#include "pgenheaders.h"
#include "tokenizer.h"
#include "node.h"
#include "grammar.h"
#include "parser.h"
#include "parsetok.h"
#include "errcode.h"
#include "graminit.h"


/* Forward */
static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int *);
static void initerr(perrdetail *err_ret, const char *filename);

/* Parse input coming from a string.  Return error code, print some errors. */

node *
PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
{
    return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
}

node *
PyParser_ParseStringFlags(const char *s, grammar *g, int start,
                          perrdetail *err_ret, int flags)
{
    return PyParser_ParseStringFlagsFilename(s, NULL,
                                             g, start, err_ret, flags);
}

node *
PyParser_ParseStringFlagsFilename(const char *s, const char *filename,
                                  grammar *g, int start,
                                  perrdetail *err_ret, int flags)
{
    int iflags = flags;
    return PyParser_ParseStringFlagsFilenameEx(s, filename, g, start,
                                               err_ret, &iflags);
}

node *
PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename,
                                    grammar *g, int start,
                                    perrdetail *err_ret, int *flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if (*flags & PyPARSE_IGNORE_COOKIE)
        tok = PyTokenizer_FromUTF8(s);
    else
        tok = PyTokenizer_FromString(s);
    if (tok == NULL) {
        err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
        return NULL;
    }

    tok->filename = filename ? filename : "<string>";
    return parsetok(tok, g, start, err_ret, flags);
}
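
#if 0
/* Minimal usage sketch of the string entry point, assuming the generated
   grammar table _PyParser_Grammar from Python/graminit.c and the
   file_input start symbol from graminit.h.  The helper name
   example_parse_source is hypothetical and error handling is
   abbreviated. */
extern grammar _PyParser_Grammar;      /* generated in Python/graminit.c */

static node *
example_parse_source(const char *source)
{
    perrdetail err;
    node *n = PyParser_ParseString(source, &_PyParser_Grammar,
                                   file_input, &err);
    if (n == NULL) {
        /* err.error holds an errcode.h code (e.g. E_SYNTAX, E_NOMEM);
           err.lineno and err.offset locate the failure. */
        return NULL;
    }
    return n;   /* caller owns the tree; release it with PyNode_Free() */
}
#endif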

/* Parse input coming from a file.  Return error code, print some errors. */

node *
PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
                   char *ps1, char *ps2, perrdetail *err_ret)
{
    return PyParser_ParseFileFlags(fp, filename, NULL,
                                   g, start, ps1, ps2, err_ret, 0);
}

node *
PyParser_ParseFileFlags(FILE *fp, const char *filename, const char *enc,
                        grammar *g, int start,
                        char *ps1, char *ps2, perrdetail *err_ret, int flags)
{
    int iflags = flags;
    return PyParser_ParseFileFlagsEx(fp, filename, enc, g, start, ps1,
                                     ps2, err_ret, &iflags);
}

node *
PyParser_ParseFileFlagsEx(FILE *fp, const char *filename,
                          const char *enc, grammar *g, int start,
                          char *ps1, char *ps2, perrdetail *err_ret, int *flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if ((tok = PyTokenizer_FromFile(fp, (char *)enc, ps1, ps2)) == NULL) {
        err_ret->error = E_NOMEM;
        return NULL;
    }
    tok->filename = filename;
    return parsetok(tok, g, start, err_ret, flags);
}
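
#if 0
/* Sketch of the file entry point, under the same assumptions as the
   string sketch above (hypothetical helper name, _PyParser_Grammar,
   file_input).  ps1/ps2 are the interactive prompt strings; they are left
   NULL here, as is done for non-interactive parsing of a script. */
static node *
example_parse_stream(FILE *fp, const char *filename)
{
    perrdetail err;
    return PyParser_ParseFile(fp, filename, &_PyParser_Grammar,
                              file_input, NULL, NULL, &err);
}
#endif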

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#if 0
static char with_msg[] =
"%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";

static char as_msg[] =
"%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";

static void
warn(const char *msg, const char *filename, int lineno)
{
    if (filename == NULL)
        filename = "<string>";
    PySys_WriteStderr(msg, filename, lineno);
}
#endif
#endif

/* Parse input coming from the given tokenizer structure.
   Return error code. */

static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0, handling_import = 0, handling_with = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_BARRY_AS_BDFL)
        ps->p_flags |= CO_FUTURE_BARRY_AS_BDFL;
#endif

    /* Main loop: pull tokens from the tokenizer one at a time and feed
       them to the parser until an ENDMARKER or an error is seen. */
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            handling_with = handling_import = 0;
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent &&
                !(*flags & PyPARSE_DONT_IMPLY_DEDENT))
            {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
        if (type == NOTEQUAL) {
            if (!(ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                strcmp(str, "!=")) {
                err_ret->error = E_SYNTAX;
                break;
            }
            else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                     strcmp(str, "<>")) {
                err_ret->text = "with Barry as BDFL, use '<>' "
                                "instead of '!='";
                err_ret->error = E_SYNTAX;
                break;
            }
        }
#endif
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str,
                               tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
    }
    else
        n = NULL;

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    *flags = ps->p_flags;
#endif
    PyParser_Delete(ps);

    if (n == NULL) {
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
            err_ret->text = (char *) PyObject_MALLOC(len + 1);
            if (err_ret->text != NULL) {
                if (len > 0)
                    strncpy(err_ret->text, tok->buf, len);
                err_ret->text[len] = '\0';
            }
        }
    } else if (tok->encoding != NULL) {
        /* Wrap the tree in an encoding_decl node carrying the source
           encoding recorded by the tokenizer. */
        node *r = PyNode_New(encoding_decl);
        if (!r) {
            err_ret->error = E_NOMEM;
            n = NULL;
            goto done;
        }
        r->n_str = tok->encoding;
        r->n_nchildren = 1;
        r->n_child = n;
        tok->encoding = NULL;
        n = r;
    }

done:
    PyTokenizer_Free(tok);

    return n;
}

static void
initerr(perrdetail *err_ret, const char *filename)
{
    err_ret->error = E_OK;
    err_ret->filename = filename;
    err_ret->lineno = 0;
    err_ret->offset = 0;
    err_ret->text = NULL;
    err_ret->token = -1;
    err_ret->expected = -1;
}