1: | <?php declare(strict_types = 1); |
2: | |
3: | namespace PHPStan\PhpDocParser\Parser; |
4: | |
5: | use LogicException; |
6: | use PHPStan\PhpDocParser\Ast; |
7: | use PHPStan\PhpDocParser\Lexer\Lexer; |
8: | use function in_array; |
9: | use function strpos; |
10: | use function trim; |
11: | |
/**
 * Recursive-descent parser for PHPDoc type expressions.
 *
 * Consumes tokens from a TokenIterator (produced by the Lexer) and builds
 * an Ast\Type\TypeNode tree. Uses the iterator's save-point mechanism
 * (pushSavePoint / dropSavePoint / rollback) for backtracking wherever the
 * grammar is ambiguous.
 */
class TypeParser
{

	/**
	 * Optional fallback parser for constant expressions (e.g. `Foo::BAR`,
	 * literal numbers/strings used as types). When null, inputs that are not
	 * plain types raise a ParserException instead.
	 *
	 * @var ConstExprParser|null
	 */
	private $constExprParser;

	public function __construct(?ConstExprParser $constExprParser = null)
	{
		$this->constExprParser = $constExprParser;
	}


	/**
	 * Parses a complete type expression: a nullable type (`?T`), or an
	 * atomic type optionally extended into a union (`A|B`) or an
	 * intersection (`A&B`).
	 *
	 * @throws ParserException when the tokens do not form a valid type
	 */
	public function parse(TokenIterator $tokens): Ast\Type\TypeNode
	{
		if ($tokens->isCurrentTokenType(Lexer::TOKEN_NULLABLE)) {
			$type = $this->parseNullable($tokens);

		} else {
			$type = $this->parseAtomic($tokens);

			if ($tokens->isCurrentTokenType(Lexer::TOKEN_UNION)) {
				$type = $this->parseUnion($tokens, $type);

			} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_INTERSECTION)) {
				$type = $this->parseIntersection($tokens, $type);
			}
		}

		return $type;
	}


	/**
	 * Like parse(), but used in nested positions (inside parentheses and in
	 * the branches of conditional types). Additionally accepts
	 * `$param is ...` conditional types and `T is ...` conditionals, and
	 * tolerates PHPDoc end-of-line tokens before union/intersection parts.
	 *
	 * @throws ParserException when the tokens do not form a valid type
	 */
	private function subParse(TokenIterator $tokens): Ast\Type\TypeNode
	{
		if ($tokens->isCurrentTokenType(Lexer::TOKEN_NULLABLE)) {
			$type = $this->parseNullable($tokens);

		} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_VARIABLE)) {
			// `$param is Foo ? A : B` — conditional type keyed on a parameter.
			$type = $this->parseConditionalForParameter($tokens, $tokens->currentTokenValue());

		} else {
			$type = $this->parseAtomic($tokens);

			if ($tokens->isCurrentTokenValue('is')) {
				// `T is Foo ? A : B` — conditional type keyed on a type.
				$type = $this->parseConditional($tokens, $type);
			} else {
				$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

				if ($tokens->isCurrentTokenType(Lexer::TOKEN_UNION)) {
					$type = $this->subParseUnion($tokens, $type);

				} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_INTERSECTION)) {
					$type = $this->subParseIntersection($tokens, $type);
				}
			}
		}

		return $type;
	}



	/**
	 * Parses a single atomic type: a parenthesized type, `$this`, an
	 * identifier (optionally extended into a generic type, callable
	 * signature, array/list/object shape, or array/offset-access suffixes),
	 * or — as a fallback — a constant expression handled by the injected
	 * ConstExprParser.
	 *
	 * @throws ParserException when no atomic type can be recognized
	 */
	private function parseAtomic(TokenIterator $tokens): Ast\Type\TypeNode
	{
		if ($tokens->tryConsumeTokenType(Lexer::TOKEN_OPEN_PARENTHESES)) {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			$type = $this->subParse($tokens);
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

			$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_PARENTHESES);

			// `(A|B)[]` — suffix applies to the whole parenthesized type.
			if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
				return $this->tryParseArrayOrOffsetAccess($tokens, $type);
			}

			return $type;
		}

		if ($tokens->tryConsumeTokenType(Lexer::TOKEN_THIS_VARIABLE)) {
			$type = new Ast\Type\ThisTypeNode();

			if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
				return $this->tryParseArrayOrOffsetAccess($tokens, $type);
			}

			return $type;
		}

		$currentTokenValue = $tokens->currentTokenValue();
		// Save point: an identifier followed by `::` is a class-constant
		// fetch, which must be re-parsed from here by the ConstExprParser.
		$tokens->pushSavePoint();
		if ($tokens->tryConsumeTokenType(Lexer::TOKEN_IDENTIFIER)) {
			$type = new Ast\Type\IdentifierTypeNode($currentTokenValue);

			if (!$tokens->isCurrentTokenType(Lexer::TOKEN_DOUBLE_COLON)) {
				$tokens->dropSavePoint();
				if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET)) {
					// Disambiguate a generic `Foo<...>` from inline HTML
					// markup in the doc text; isHtml() consumes tokens, so
					// probe under a save point and rewind either way.
					$tokens->pushSavePoint();

					$isHtml = $this->isHtml($tokens);
					$tokens->rollback();
					if ($isHtml) {
						return $type;
					}

					$type = $this->parseGeneric($tokens, $type);

					if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
						$type = $this->tryParseArrayOrOffsetAccess($tokens, $type);
					}
				} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_PARENTHESES)) {
					// `callable(...): T` / `Closure(...): T` signature.
					$type = $this->tryParseCallable($tokens, $type);

				} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
					$type = $this->tryParseArrayOrOffsetAccess($tokens, $type);

				} elseif (in_array($type->name, ['array', 'list', 'object'], true) && $tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_CURLY_BRACKET) && !$tokens->isPrecededByHorizontalWhitespace()) {
					// Shape syntax: `array{...}`, `list{...}`, `object{...}`.
					// The no-preceding-whitespace check keeps e.g.
					// `array {@see Foo}` from being read as a shape.
					if ($type->name === 'object') {
						$type = $this->parseObjectShape($tokens);
					} else {
						$type = $this->parseArrayShape($tokens, $type, $type->name);
					}

					if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
						$type = $this->tryParseArrayOrOffsetAccess($tokens, $type);
					}
				}

				return $type;
			} else {
				// Identifier is the class part of `Foo::CONST` — rewind so
				// the const-expression fallback below sees it from the start.
				$tokens->rollback();
			}
		} else {
			$tokens->dropSavePoint();
		}

		// Built up front so the same error is thrown whether the fallback is
		// absent, rejects the input, or produces an unsupported array node.
		$exception = new ParserException(
			$tokens->currentTokenValue(),
			$tokens->currentTokenType(),
			$tokens->currentTokenOffset(),
			Lexer::TOKEN_IDENTIFIER
		);

		if ($this->constExprParser === null) {
			throw $exception;
		}

		try {
			$constExpr = $this->constExprParser->parse($tokens, true);
			if ($constExpr instanceof Ast\ConstExpr\ConstExprArrayNode) {
				// Array literals are not valid as types.
				throw $exception;
			}

			return new Ast\Type\ConstTypeNode($constExpr);
		} catch (LogicException $e) {
			throw $exception;
		}
	}



	/**
	 * Parses the remaining `|`-separated members of a union type; $type is
	 * the already-parsed first member.
	 */
	private function parseUnion(TokenIterator $tokens, Ast\Type\TypeNode $type): Ast\Type\TypeNode
	{
		$types = [$type];

		while ($tokens->tryConsumeTokenType(Lexer::TOKEN_UNION)) {
			$types[] = $this->parseAtomic($tokens);
		}

		return new Ast\Type\UnionTypeNode($types);
	}



	/**
	 * Union variant used in nested positions: also swallows PHPDoc EOL
	 * tokens around each member, allowing multi-line unions.
	 */
	private function subParseUnion(TokenIterator $tokens, Ast\Type\TypeNode $type): Ast\Type\TypeNode
	{
		$types = [$type];

		while ($tokens->tryConsumeTokenType(Lexer::TOKEN_UNION)) {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			$types[] = $this->parseAtomic($tokens);
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		}

		return new Ast\Type\UnionTypeNode($types);
	}



	/**
	 * Parses the remaining `&`-separated members of an intersection type;
	 * $type is the already-parsed first member.
	 */
	private function parseIntersection(TokenIterator $tokens, Ast\Type\TypeNode $type): Ast\Type\TypeNode
	{
		$types = [$type];

		while ($tokens->tryConsumeTokenType(Lexer::TOKEN_INTERSECTION)) {
			$types[] = $this->parseAtomic($tokens);
		}

		return new Ast\Type\IntersectionTypeNode($types);
	}



	/**
	 * Intersection variant used in nested positions: also swallows PHPDoc
	 * EOL tokens around each member, allowing multi-line intersections.
	 */
	private function subParseIntersection(TokenIterator $tokens, Ast\Type\TypeNode $type): Ast\Type\TypeNode
	{
		$types = [$type];

		while ($tokens->tryConsumeTokenType(Lexer::TOKEN_INTERSECTION)) {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			$types[] = $this->parseAtomic($tokens);
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		}

		return new Ast\Type\IntersectionTypeNode($types);
	}



	/**
	 * Parses a conditional type `SubjectType is [not] Target ? If : Else`.
	 * The subject type and the leading `is` have already been recognized by
	 * the caller; this consumes the `is` identifier first.
	 *
	 * @throws ParserException on a malformed conditional
	 */
	private function parseConditional(TokenIterator $tokens, Ast\Type\TypeNode $subjectType): Ast\Type\TypeNode
	{
		// Consume the `is` keyword (already validated by the caller).
		$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);

		$negated = false;
		if ($tokens->isCurrentTokenValue('not')) {
			$negated = true;
			$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);
		}

		$targetType = $this->parse($tokens);

		// `?` separating the condition from the if-branch.
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_NULLABLE);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		$ifType = $this->parse($tokens);

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_COLON);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		// subParse so the else-branch may itself be a nested conditional.
		$elseType = $this->subParse($tokens);

		return new Ast\Type\ConditionalTypeNode($subjectType, $targetType, $ifType, $elseType, $negated);
	}


	/**
	 * Parses a parameter-based conditional type
	 * `$param is [not] Target ? If : Else`. $parameterName is the variable
	 * token's value, captured by the caller before any token was consumed.
	 *
	 * @throws ParserException on a malformed conditional
	 */
	private function parseConditionalForParameter(TokenIterator $tokens, string $parameterName): Ast\Type\TypeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_VARIABLE);
		$tokens->consumeTokenValue(Lexer::TOKEN_IDENTIFIER, 'is');

		$negated = false;
		if ($tokens->isCurrentTokenValue('not')) {
			$negated = true;
			$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);
		}

		$targetType = $this->parse($tokens);

		// `?` separating the condition from the if-branch.
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_NULLABLE);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		$ifType = $this->parse($tokens);

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_COLON);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		// subParse so the else-branch may itself be a nested conditional.
		$elseType = $this->subParse($tokens);

		return new Ast\Type\ConditionalTypeForParameterNode($parameterName, $targetType, $ifType, $elseType, $negated);
	}



	/**
	 * Parses a nullable type `?T` into a NullableTypeNode wrapping the
	 * atomic type that follows the `?`.
	 *
	 * @throws ParserException when no atomic type follows
	 */
	private function parseNullable(TokenIterator $tokens): Ast\Type\TypeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_NULLABLE);

		$type = $this->parseAtomic($tokens);

		return new Ast\Type\NullableTypeNode($type);
	}


	/**
	 * Heuristically decides whether the `<` at the current position starts
	 * an HTML tag rather than a generic type argument list: it does when
	 * `<name>` is followed somewhere later by a matching `</name>` closer.
	 *
	 * NOTE: this consumes tokens (possibly up to the end of the stream);
	 * callers are expected to wrap it in a save point and roll back, as
	 * parseAtomic() does.
	 *
	 * @throws ParserException when the current token is not `<`
	 */
	public function isHtml(TokenIterator $tokens): bool
	{
		$tokens->consumeTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET);

		if (!$tokens->isCurrentTokenType(Lexer::TOKEN_IDENTIFIER)) {
			return false;
		}

		$htmlTagName = $tokens->currentTokenValue();

		$tokens->next();

		if (!$tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET)) {
			return false;
		}

		// Scan ahead for a closing tag `</name>` matching the opener.
		while (!$tokens->isCurrentTokenType(Lexer::TOKEN_END)) {
			if (
				$tokens->tryConsumeTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET)
				&& strpos($tokens->currentTokenValue(), '/' . $htmlTagName . '>') !== false
			) {
				return true;
			}

			$tokens->next();
		}

		return false;
	}


	/**
	 * Parses a generic type argument list `<T, U, ...>` for the given base
	 * identifier. Accepts multi-line argument lists and a trailing comma.
	 *
	 * @throws ParserException on a malformed argument list
	 */
	public function parseGeneric(TokenIterator $tokens, Ast\Type\IdentifierTypeNode $baseType): Ast\Type\GenericTypeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		$genericTypes = [];
		$variances = [];

		[$genericTypes[], $variances[]] = $this->parseGenericTypeArgument($tokens);

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			if ($tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET)) {
				// Trailing comma before `>` — argument list is complete.
				return new Ast\Type\GenericTypeNode($baseType, $genericTypes, $variances);
			}
			[$genericTypes[], $variances[]] = $this->parseGenericTypeArgument($tokens);
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		}

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_ANGLE_BRACKET);

		return new Ast\Type\GenericTypeNode($baseType, $genericTypes, $variances);
	}


	/**
	 * Parses one generic type argument with its optional variance modifier:
	 * `*` (bivariant `mixed`), `contravariant T`, `covariant T`, or plain
	 * `T` (invariant).
	 *
	 * @return array{Ast\Type\TypeNode, string} the argument type and one of
	 *         the Ast\Type\GenericTypeNode::VARIANCE_* constants
	 */
	public function parseGenericTypeArgument(TokenIterator $tokens): array
	{
		if ($tokens->tryConsumeTokenType(Lexer::TOKEN_WILDCARD)) {
			return [
				new Ast\Type\IdentifierTypeNode('mixed'),
				Ast\Type\GenericTypeNode::VARIANCE_BIVARIANT,
			];
		}

		if ($tokens->tryConsumeTokenValue('contravariant')) {
			$variance = Ast\Type\GenericTypeNode::VARIANCE_CONTRAVARIANT;
		} elseif ($tokens->tryConsumeTokenValue('covariant')) {
			$variance = Ast\Type\GenericTypeNode::VARIANCE_COVARIANT;
		} else {
			$variance = Ast\Type\GenericTypeNode::VARIANCE_INVARIANT;
		}

		$type = $this->parse($tokens);
		return [$type, $variance];
	}



	/**
	 * Parses a callable signature `(param, ...): ReturnType` for the given
	 * callable identifier (e.g. `callable`, `Closure`). Accepts multi-line
	 * parameter lists and a trailing comma.
	 *
	 * @throws ParserException on a malformed signature
	 */
	private function parseCallable(TokenIterator $tokens, Ast\Type\IdentifierTypeNode $identifier): Ast\Type\TypeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_OPEN_PARENTHESES);
		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

		$parameters = [];
		if (!$tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_PARENTHESES)) {
			$parameters[] = $this->parseCallableParameter($tokens);
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA)) {
				$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
				if ($tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_PARENTHESES)) {
					// Trailing comma before `)` — parameter list is complete.
					break;
				}
				$parameters[] = $this->parseCallableParameter($tokens);
				$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
			}
		}

		// The return type is mandatory: `): T`.
		$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_PARENTHESES);
		$tokens->consumeTokenType(Lexer::TOKEN_COLON);
		$returnType = $this->parseCallableReturnType($tokens);

		return new Ast\Type\CallableTypeNode($identifier, $parameters, $returnType);
	}



	/**
	 * Parses one callable parameter: `Type [&] [...] [$name] [=]`, where
	 * `&` marks by-reference, `...` variadic, and `=` an optional parameter.
	 *
	 * @throws ParserException when the parameter type is malformed
	 */
	private function parseCallableParameter(TokenIterator $tokens): Ast\Type\CallableTypeParameterNode
	{
		$type = $this->parse($tokens);
		$isReference = $tokens->tryConsumeTokenType(Lexer::TOKEN_REFERENCE);
		$isVariadic = $tokens->tryConsumeTokenType(Lexer::TOKEN_VARIADIC);

		if ($tokens->isCurrentTokenType(Lexer::TOKEN_VARIABLE)) {
			$parameterName = $tokens->currentTokenValue();
			$tokens->consumeTokenType(Lexer::TOKEN_VARIABLE);

		} else {
			// Parameter names are optional in callable signatures.
			$parameterName = '';
		}

		$isOptional = $tokens->tryConsumeTokenType(Lexer::TOKEN_EQUAL);
		return new Ast\Type\CallableTypeParameterNode($type, $isReference, $isVariadic, $parameterName, $isOptional);
	}



	/**
	 * Parses the return type of a callable signature. More restrictive than
	 * parse(): a bare identifier (optionally generic, shaped, or suffixed
	 * with `[]`), a nullable type, or a parenthesized full type — so that a
	 * following `|`/`&` is not swallowed into the return type.
	 *
	 * @throws ParserException on a malformed return type
	 */
	private function parseCallableReturnType(TokenIterator $tokens): Ast\Type\TypeNode
	{
		if ($tokens->isCurrentTokenType(Lexer::TOKEN_NULLABLE)) {
			$type = $this->parseNullable($tokens);

		} elseif ($tokens->tryConsumeTokenType(Lexer::TOKEN_OPEN_PARENTHESES)) {
			$type = $this->parse($tokens);
			$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_PARENTHESES);

		} else {
			$type = new Ast\Type\IdentifierTypeNode($tokens->currentTokenValue());
			$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);

			if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_ANGLE_BRACKET)) {
				$type = $this->parseGeneric($tokens, $type);

			} elseif (in_array($type->name, ['array', 'list'], true) && $tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_CURLY_BRACKET) && !$tokens->isPrecededByHorizontalWhitespace()) {
				$type = $this->parseArrayShape($tokens, $type, $type->name);
			}
		}

		if ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
			$type = $this->tryParseArrayOrOffsetAccess($tokens, $type);
		}

		return $type;
	}



	/**
	 * Attempts to parse a callable signature after $identifier; on failure
	 * rolls the token stream back and returns the bare identifier unchanged
	 * (the `(` then belongs to the surrounding doc text, not a signature).
	 */
	private function tryParseCallable(TokenIterator $tokens, Ast\Type\IdentifierTypeNode $identifier): Ast\Type\TypeNode
	{
		try {
			$tokens->pushSavePoint();
			$type = $this->parseCallable($tokens, $identifier);
			$tokens->dropSavePoint();

		} catch (ParserException $e) {
			$tokens->rollback();
			$type = $identifier;
		}

		return $type;
	}



	/**
	 * Greedily applies `[]` (array) and `[Offset]` (offset-access) suffixes
	 * to $type. Each suffix is parsed under a save point; on failure the
	 * stream is rolled back and the type built so far is returned.
	 */
	private function tryParseArrayOrOffsetAccess(TokenIterator $tokens, Ast\Type\TypeNode $type): Ast\Type\TypeNode
	{
		try {
			while ($tokens->isCurrentTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET)) {
				$tokens->pushSavePoint();

				// Offset access requires the `[` to hug the type: `T[K]`,
				// not `T [K]` (which would be doc text).
				$canBeOffsetAccessType = !$tokens->isPrecededByHorizontalWhitespace();
				$tokens->consumeTokenType(Lexer::TOKEN_OPEN_SQUARE_BRACKET);

				if ($canBeOffsetAccessType && !$tokens->isCurrentTokenType(Lexer::TOKEN_CLOSE_SQUARE_BRACKET)) {
					$offset = $this->parse($tokens);
					$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_SQUARE_BRACKET);
					$tokens->dropSavePoint();
					$type = new Ast\Type\OffsetAccessTypeNode($type, $offset);
				} else {
					$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_SQUARE_BRACKET);
					$tokens->dropSavePoint();
					$type = new Ast\Type\ArrayTypeNode($type);
				}
			}

		} catch (ParserException $e) {
			$tokens->rollback();
		}

		return $type;
	}



	/**
	 * Parses an array/list shape `{key: Type, ...}` after the opening brace
	 * position. A `...` item marks the shape as unsealed; $kind is 'array'
	 * or 'list' and is recorded on the node. Accepts multi-line shapes and
	 * a trailing comma.
	 *
	 * @throws ParserException on a malformed shape
	 */
	private function parseArrayShape(TokenIterator $tokens, Ast\Type\TypeNode $type, string $kind): Ast\Type\ArrayShapeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_OPEN_CURLY_BRACKET);

		$items = [];
		$sealed = true;

		do {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

			if ($tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET)) {
				// Empty shape or trailing comma before `}`.
				return new Ast\Type\ArrayShapeNode($items, true, $kind);
			}

			if ($tokens->tryConsumeTokenType(Lexer::TOKEN_VARIADIC)) {
				// `...` — unsealed shape; must be the last item.
				$sealed = false;
				$tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA);
				break;
			}

			$items[] = $this->parseArrayShapeItem($tokens);

			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		} while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA));

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET);

		return new Ast\Type\ArrayShapeNode($items, $sealed, $kind);
	}



	/**
	 * Parses one array-shape item: either `key[?]: Type` or a bare `Type`.
	 * The keyed form is tried first under a save point; if it fails
	 * (no valid key / no colon), the stream is rewound and the item is
	 * parsed as an unkeyed value type.
	 */
	private function parseArrayShapeItem(TokenIterator $tokens): Ast\Type\ArrayShapeItemNode
	{
		try {
			$tokens->pushSavePoint();
			$key = $this->parseArrayShapeKey($tokens);
			// `key?:` marks the item as optional.
			$optional = $tokens->tryConsumeTokenType(Lexer::TOKEN_NULLABLE);
			$tokens->consumeTokenType(Lexer::TOKEN_COLON);
			$value = $this->parse($tokens);
			$tokens->dropSavePoint();

			return new Ast\Type\ArrayShapeItemNode($key, $optional, $value);
		} catch (ParserException $e) {
			$tokens->rollback();
			$value = $this->parse($tokens);

			return new Ast\Type\ArrayShapeItemNode(null, false, $value);
		}
	}



	/**
	 * Parses an array-shape key: an integer literal, a single- or
	 * double-quoted string (quotes stripped), or a bare identifier.
	 *
	 * @return Ast\ConstExpr\ConstExprIntegerNode|Ast\ConstExpr\ConstExprStringNode|Ast\Type\IdentifierTypeNode
	 * @throws ParserException when the current token cannot be a key
	 */
	private function parseArrayShapeKey(TokenIterator $tokens)
	{
		if ($tokens->isCurrentTokenType(Lexer::TOKEN_INTEGER)) {
			$key = new Ast\ConstExpr\ConstExprIntegerNode($tokens->currentTokenValue());
			$tokens->next();

		} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_SINGLE_QUOTED_STRING)) {
			$key = new Ast\ConstExpr\ConstExprStringNode(trim($tokens->currentTokenValue(), "'"));
			$tokens->next();

		} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_DOUBLE_QUOTED_STRING)) {
			$key = new Ast\ConstExpr\ConstExprStringNode(trim($tokens->currentTokenValue(), '"'));
			$tokens->next();

		} else {
			$key = new Ast\Type\IdentifierTypeNode($tokens->currentTokenValue());
			$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);
		}

		return $key;
	}



	/**
	 * Parses an object shape `object{prop: Type, ...}` after the opening
	 * brace position. Unlike array shapes, object shapes have no variadic
	 * (`...`) form. Accepts multi-line shapes and a trailing comma.
	 *
	 * @throws ParserException on a malformed shape
	 */
	private function parseObjectShape(TokenIterator $tokens): Ast\Type\ObjectShapeNode
	{
		$tokens->consumeTokenType(Lexer::TOKEN_OPEN_CURLY_BRACKET);

		$items = [];

		do {
			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);

			if ($tokens->tryConsumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET)) {
				// Empty shape or trailing comma before `}`.
				return new Ast\Type\ObjectShapeNode($items);
			}

			$items[] = $this->parseObjectShapeItem($tokens);

			$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		} while ($tokens->tryConsumeTokenType(Lexer::TOKEN_COMMA));

		$tokens->tryConsumeTokenType(Lexer::TOKEN_PHPDOC_EOL);
		$tokens->consumeTokenType(Lexer::TOKEN_CLOSE_CURLY_BRACKET);

		return new Ast\Type\ObjectShapeNode($items);
	}


	/**
	 * Parses one object-shape item `key[?]: Type`. Unlike array-shape
	 * items, the key and colon are mandatory (no bare-value form).
	 *
	 * @throws ParserException when key, colon, or value is malformed
	 */
	private function parseObjectShapeItem(TokenIterator $tokens): Ast\Type\ObjectShapeItemNode
	{
		$key = $this->parseObjectShapeKey($tokens);
		// `key?:` marks the property as optional.
		$optional = $tokens->tryConsumeTokenType(Lexer::TOKEN_NULLABLE);
		$tokens->consumeTokenType(Lexer::TOKEN_COLON);
		$value = $this->parse($tokens);

		return new Ast\Type\ObjectShapeItemNode($key, $optional, $value);
	}



	/**
	 * Parses an object-shape key: a single- or double-quoted string (quotes
	 * stripped) or a bare identifier. Integer keys are not allowed here,
	 * unlike array-shape keys.
	 *
	 * @return Ast\ConstExpr\ConstExprStringNode|Ast\Type\IdentifierTypeNode
	 * @throws ParserException when the current token cannot be a key
	 */
	private function parseObjectShapeKey(TokenIterator $tokens)
	{
		if ($tokens->isCurrentTokenType(Lexer::TOKEN_SINGLE_QUOTED_STRING)) {
			$key = new Ast\ConstExpr\ConstExprStringNode(trim($tokens->currentTokenValue(), "'"));
			$tokens->next();

		} elseif ($tokens->isCurrentTokenType(Lexer::TOKEN_DOUBLE_QUOTED_STRING)) {
			$key = new Ast\ConstExpr\ConstExprStringNode(trim($tokens->currentTokenValue(), '"'));
			$tokens->next();

		} else {
			$key = new Ast\Type\IdentifierTypeNode($tokens->currentTokenValue());
			$tokens->consumeTokenType(Lexer::TOKEN_IDENTIFIER);
		}

		return $key;
	}

}
650: | |