[python.git] / Parser / parsetok.c — parser-tokenizer link implementation
(blob f3d8462a8998ded7e89d4a496d3d45e8c1affbfd; from commit "Splits Modules/_bsddb.c up into bsddb.h and _bsddb.c and adds a C API")
2 /* Parser-tokenizer link implementation */
4 #include "pgenheaders.h"
5 #include "tokenizer.h"
6 #include "node.h"
7 #include "grammar.h"
8 #include "parser.h"
9 #include "parsetok.h"
10 #include "errcode.h"
11 #include "graminit.h"
13 int Py_TabcheckFlag;
16 /* Forward */
17 static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int);
18 static void initerr(perrdetail *err_ret, const char* filename);
20 /* Parse input coming from a string. Return error code, print some errors. */
21 node *
22 PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
24 return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
27 node *
28 PyParser_ParseStringFlags(const char *s, grammar *g, int start,
29 perrdetail *err_ret, int flags)
31 return PyParser_ParseStringFlagsFilename(s, NULL,
32 g, start, err_ret, flags);
35 node *
36 PyParser_ParseStringFlagsFilename(const char *s, const char *filename,
37 grammar *g, int start,
38 perrdetail *err_ret, int flags)
40 struct tok_state *tok;
42 initerr(err_ret, filename);
44 if ((tok = PyTokenizer_FromString(s)) == NULL) {
45 err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
46 return NULL;
49 tok->filename = filename ? filename : "<string>";
50 if (Py_TabcheckFlag || Py_VerboseFlag) {
51 tok->altwarning = (tok->filename != NULL);
52 if (Py_TabcheckFlag >= 2)
53 tok->alterror++;
56 return parsetok(tok, g, start, err_ret, flags);
59 /* Parse input coming from a file. Return error code, print some errors. */
61 node *
62 PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
63 char *ps1, char *ps2, perrdetail *err_ret)
65 return PyParser_ParseFileFlags(fp, filename, g, start, ps1, ps2,
66 err_ret, 0);
69 node *
70 PyParser_ParseFileFlags(FILE *fp, const char *filename, grammar *g, int start,
71 char *ps1, char *ps2, perrdetail *err_ret, int flags)
73 struct tok_state *tok;
75 initerr(err_ret, filename);
77 if ((tok = PyTokenizer_FromFile(fp, ps1, ps2)) == NULL) {
78 err_ret->error = E_NOMEM;
79 return NULL;
81 tok->filename = filename;
82 if (Py_TabcheckFlag || Py_VerboseFlag) {
83 tok->altwarning = (filename != NULL);
84 if (Py_TabcheckFlag >= 2)
85 tok->alterror++;
89 return parsetok(tok, g, start, err_ret, flags);
92 #if 0
93 static char with_msg[] =
94 "%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";
96 static char as_msg[] =
97 "%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";
99 static void
100 warn(const char *msg, const char *filename, int lineno)
102 if (filename == NULL)
103 filename = "<string>";
104 PySys_WriteStderr(msg, filename, lineno);
106 #endif
108 /* Parse input coming from the given tokenizer structure.
109 Return error code. */
111 static node *
112 parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
113 int flags)
115 parser_state *ps;
116 node *n;
117 int started = 0, handling_import = 0, handling_with = 0;
119 if ((ps = PyParser_New(g, start)) == NULL) {
120 fprintf(stderr, "no mem for new parser\n");
121 err_ret->error = E_NOMEM;
122 PyTokenizer_Free(tok);
123 return NULL;
125 #ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
126 if (flags & PyPARSE_WITH_IS_KEYWORD)
127 ps->p_flags |= CO_FUTURE_WITH_STATEMENT;
128 #endif
130 for (;;) {
131 char *a, *b;
132 int type;
133 size_t len;
134 char *str;
135 int col_offset;
137 type = PyTokenizer_Get(tok, &a, &b);
138 if (type == ERRORTOKEN) {
139 err_ret->error = tok->done;
140 break;
142 if (type == ENDMARKER && started) {
143 type = NEWLINE; /* Add an extra newline */
144 handling_with = handling_import = 0;
145 started = 0;
146 /* Add the right number of dedent tokens,
147 except if a certain flag is given --
148 codeop.py uses this. */
149 if (tok->indent &&
150 !(flags & PyPARSE_DONT_IMPLY_DEDENT))
152 tok->pendin = -tok->indent;
153 tok->indent = 0;
156 else
157 started = 1;
158 len = b - a; /* XXX this may compute NULL - NULL */
159 str = (char *) PyObject_MALLOC(len + 1);
160 if (str == NULL) {
161 fprintf(stderr, "no mem for next token\n");
162 err_ret->error = E_NOMEM;
163 break;
165 if (len > 0)
166 strncpy(str, a, len);
167 str[len] = '\0';
169 #ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
170 /* This is only necessary to support the "as" warning, but
171 we don't want to warn about "as" in import statements. */
172 if (type == NAME &&
173 len == 6 && str[0] == 'i' && strcmp(str, "import") == 0)
174 handling_import = 1;
176 /* Warn about with as NAME */
177 if (type == NAME &&
178 !(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) {
179 if (len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
180 warn(with_msg, err_ret->filename, tok->lineno);
181 else if (!(handling_import || handling_with) &&
182 len == 2 && str[0] == 'a' &&
183 strcmp(str, "as") == 0)
184 warn(as_msg, err_ret->filename, tok->lineno);
186 else if (type == NAME &&
187 (ps->p_flags & CO_FUTURE_WITH_STATEMENT) &&
188 len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
189 handling_with = 1;
190 #endif
191 if (a >= tok->line_start)
192 col_offset = a - tok->line_start;
193 else
194 col_offset = -1;
196 if ((err_ret->error =
197 PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
198 &(err_ret->expected))) != E_OK) {
199 if (err_ret->error != E_DONE) {
200 PyObject_FREE(str);
201 err_ret->token = type;
203 break;
207 if (err_ret->error == E_DONE) {
208 n = ps->p_tree;
209 ps->p_tree = NULL;
211 else
212 n = NULL;
214 PyParser_Delete(ps);
216 if (n == NULL) {
217 if (tok->lineno <= 1 && tok->done == E_EOF)
218 err_ret->error = E_EOF;
219 err_ret->lineno = tok->lineno;
220 if (tok->buf != NULL) {
221 char *text = NULL;
222 size_t len;
223 assert(tok->cur - tok->buf < INT_MAX);
224 err_ret->offset = (int)(tok->cur - tok->buf);
225 len = tok->inp - tok->buf;
226 #ifdef Py_USING_UNICODE
227 text = PyTokenizer_RestoreEncoding(tok, len, &err_ret->offset);
229 #endif
230 if (text == NULL) {
231 text = (char *) PyObject_MALLOC(len + 1);
232 if (text != NULL) {
233 if (len > 0)
234 strncpy(text, tok->buf, len);
235 text[len] = '\0';
238 err_ret->text = text;
240 } else if (tok->encoding != NULL) {
241 node* r = PyNode_New(encoding_decl);
242 if (!r) {
243 err_ret->error = E_NOMEM;
244 n = NULL;
245 goto done;
247 r->n_str = tok->encoding;
248 r->n_nchildren = 1;
249 r->n_child = n;
250 tok->encoding = NULL;
251 n = r;
254 done:
255 PyTokenizer_Free(tok);
257 return n;
260 static void
261 initerr(perrdetail *err_ret, const char *filename)
263 err_ret->error = E_OK;
264 err_ret->filename = filename;
265 err_ret->lineno = 0;
266 err_ret->offset = 0;
267 err_ret->text = NULL;
268 err_ret->token = -1;
269 err_ret->expected = -1;