/******************************************************************************
 *
 * Module Name: psparse - Parser top level AML parse routines
 *
 *****************************************************************************/

/*
 * Copyright (C) 2000 - 2016, Intel Corp.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions, and the following disclaimer,
 *    without modification.
 * 2. Redistributions in binary form must reproduce at minimum a disclaimer
 *    substantially similar to the "NO WARRANTY" disclaimer below
 *    ("Disclaimer") and any redistribution must be conditioned upon
 *    including a substantially similar Disclaimer requirement for further
 *    binary redistribution.
 * 3. Neither the names of the above-listed copyright holders nor the names
 *    of any contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * Alternatively, this software may be distributed under the terms of the
 * GNU General Public License ("GPL") version 2 as published by the Free
 * Software Foundation.
 *
 * NO WARRANTY
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGES.
 */

/*
 * Parse the AML and build an operation tree as most interpreters,
 * like Perl, do. Parsing is done by hand rather than with a YACC
 * generated parser to tightly constrain stack and dynamic memory
 * usage. At the same time, parsing is kept flexible and the code
 * fairly compact by parsing based on a list of AML opcode
 * templates in aml_op_info[]
 */

#include <acpi/acpi.h>
#include "accommon.h"
#include "acparser.h"
#include "acdispat.h"
#include "amlcode.h"
#include "acinterp.h"

#define _COMPONENT          ACPI_PARSER
ACPI_MODULE_NAME("psparse")
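
/*
 * Editor's illustration (hypothetical helper, not part of the original
 * module, kept out of the build): the table-driven approach described
 * above means that decoding any opcode is a peek plus a template lookup.
 * A minimal sketch, assuming only the parser interfaces already used or
 * defined in this file:
 */
#if 0
static void acpi_ps_show_next_opcode(struct acpi_parse_state *parser_state)
{
	u16 opcode;
	const struct acpi_opcode_info *op_info;

	opcode = acpi_ps_peek_opcode(parser_state);	/* Aml pointer is not advanced */
	op_info = acpi_ps_get_opcode_info(opcode);	/* aml_op_info[] template lookup */

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Opcode %4.4X, class %u, %u byte(s)\n",
			  opcode, op_info->class,
			  acpi_ps_get_opcode_size(opcode)));
}
#endif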

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_get_opcode_size
 *
 * PARAMETERS:  opcode          - An AML opcode
 *
 * RETURN:      Size of the opcode, in bytes (1 or 2)
 *
 * DESCRIPTION: Get the size of the current opcode.
 *
 ******************************************************************************/

u32 acpi_ps_get_opcode_size(u32 opcode)
{

	/* Extended (2-byte) opcode if > 255 */

	if (opcode > 0x00FF) {
		return (2);
	}

	/* Otherwise, just a single byte opcode */

	return (1);
}

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_peek_opcode
 *
 * PARAMETERS:  parser_state        - A parser state object
 *
 * RETURN:      Next AML opcode
 *
 * DESCRIPTION: Get next AML opcode (without incrementing AML pointer)
 *
 ******************************************************************************/

u16 acpi_ps_peek_opcode(struct acpi_parse_state * parser_state)
{
	u8 *aml;
	u16 opcode;

	aml = parser_state->aml;
	opcode = (u16) ACPI_GET8(aml);

	if (opcode == AML_EXTENDED_OP_PREFIX) {

		/* Extended opcode, get the second opcode byte */

		aml++;
		opcode = (u16) ((opcode << 8) | ACPI_GET8(aml));
	}

	return (opcode);
}
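
/*
 * Encoding note (editor's addition): extended opcodes are two bytes long
 * and always begin with AML_EXTENDED_OP_PREFIX (0x5B). For example, a
 * Device definition starts with the byte pair 0x5B 0x82, for which
 * acpi_ps_peek_opcode() returns 0x5B82 and acpi_ps_get_opcode_size()
 * reports 2; a plain one-byte opcode such as 0x70 (Store) is returned as
 * 0x0070 with a size of 1. In both cases parser_state->aml is left
 * untouched.
 */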

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_complete_this_op
 *
 * PARAMETERS:  walk_state      - Current State
 *              op              - Op to complete
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Perform any cleanup at the completion of an Op.
 *
 ******************************************************************************/

acpi_status
acpi_ps_complete_this_op(struct acpi_walk_state * walk_state,
			 union acpi_parse_object * op)
{
	union acpi_parse_object *prev;
	union acpi_parse_object *next;
	const struct acpi_opcode_info *parent_info;
	union acpi_parse_object *replacement_op = NULL;
	acpi_status status = AE_OK;

	ACPI_FUNCTION_TRACE_PTR(ps_complete_this_op, op);

	/* Check for null Op, can happen if AML code is corrupt */

	if (!op) {
		return_ACPI_STATUS(AE_OK);	/* OK for now */
	}

	acpi_ex_stop_trace_opcode(op, walk_state);

	/* Delete this op and the subtree below it if asked to */

	if (((walk_state->parse_flags & ACPI_PARSE_TREE_MASK) !=
	     ACPI_PARSE_DELETE_TREE)
	    || (walk_state->op_info->class == AML_CLASS_ARGUMENT)) {
		return_ACPI_STATUS(AE_OK);
	}

	/* Make sure that we only delete this subtree */

	if (op->common.parent) {
		prev = op->common.parent->common.value.arg;
		if (!prev) {

			/* Nothing more to do */

			goto cleanup;
		}

		/*
		 * Check if we need to replace the operator and its subtree
		 * with a return value op (placeholder op)
		 */
		parent_info =
		    acpi_ps_get_opcode_info(op->common.parent->common.aml_opcode);

		switch (parent_info->class) {
		case AML_CLASS_CONTROL:

			break;

		case AML_CLASS_CREATE:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;

		case AML_CLASS_NAMED_OBJECT:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			if ((op->common.parent->common.aml_opcode ==
			     AML_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_DATA_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BUFFER_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_PACKAGE_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BANK_FIELD_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_VAR_PACKAGE_OP)) {
				replacement_op =
				    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
						     op->common.aml);
				if (!replacement_op) {
					status = AE_NO_MEMORY;
				}
			} else if ((op->common.parent->common.aml_opcode ==
				    AML_NAME_OP)
				   && (walk_state->pass_number <=
				       ACPI_IMODE_LOAD_PASS2)) {
				if ((op->common.aml_opcode == AML_BUFFER_OP)
				    || (op->common.aml_opcode == AML_PACKAGE_OP)
				    || (op->common.aml_opcode ==
					AML_VAR_PACKAGE_OP)) {
					replacement_op =
					    acpi_ps_alloc_op(op->common.aml_opcode,
							     op->common.aml);
					if (!replacement_op) {
						status = AE_NO_MEMORY;
					} else {
						replacement_op->named.data =
						    op->named.data;
						replacement_op->named.length =
						    op->named.length;
					}
				}
			}
			break;

		default:

			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
		}

		/* We must unlink this op from the parent tree */

		if (prev == op) {

			/* This op is the first in the list */

			if (replacement_op) {
				replacement_op->common.parent =
				    op->common.parent;
				replacement_op->common.value.arg = NULL;
				replacement_op->common.node = op->common.node;
				op->common.parent->common.value.arg =
				    replacement_op;
				replacement_op->common.next = op->common.next;
			} else {
				op->common.parent->common.value.arg =
				    op->common.next;
			}
		}

		/* Search the parent list */

		else {
			while (prev) {

				/* Traverse all siblings in the parent's argument list */

				next = prev->common.next;
				if (next == op) {
					if (replacement_op) {
						replacement_op->common.parent =
						    op->common.parent;
						replacement_op->common.value.arg =
						    NULL;
						replacement_op->common.node =
						    op->common.node;
						prev->common.next =
						    replacement_op;
						replacement_op->common.next =
						    op->common.next;
						next = NULL;
					} else {
						prev->common.next =
						    op->common.next;
						next = NULL;
					}
				}
				prev = next;
			}
		}
	}

cleanup:

	/* Now we can actually delete the subtree rooted at Op */

	acpi_ps_delete_parse_tree(op);
	return_ACPI_STATUS(status);
}
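
/*
 * Editor's note (illustrative, not from the original source): the
 * replacement logic above keeps the parse tree consistent while a subtree
 * is freed. For a construct such as Name(BUF0, Buffer(...){...}) parsed
 * during a load pass (BUF0 is a made-up name), the tree changes roughly
 * like this:
 *
 *     Name (BUF0)                        Name (BUF0)
 *      +-- Buffer (full subtree)   -->    +-- Buffer placeholder (carries
 *                                             named.data / named.length)
 *
 * For term_arg operands of Create*, Region, Package, etc., the deleted
 * subtree is instead replaced by an AML_INT_RETURN_VALUE_OP placeholder,
 * so the parent still has an argument slot to receive the computed result.
 */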

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_next_parse_state
 *
 * PARAMETERS:  walk_state          - Current state
 *              op                  - Current parse op
 *              callback_status     - Status from previous operation
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Update the parser state based upon the return exception from
 *              the parser callback.
 *
 ******************************************************************************/

acpi_status
acpi_ps_next_parse_state(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op,
			 acpi_status callback_status)
{
	struct acpi_parse_state *parser_state = &walk_state->parser_state;
	acpi_status status = AE_CTRL_PENDING;

	ACPI_FUNCTION_TRACE_PTR(ps_next_parse_state, op);

	switch (callback_status) {
	case AE_CTRL_TERMINATE:
		/*
		 * A control method was terminated via a RETURN statement.
		 * The walk of this method is complete.
		 */
		parser_state->aml = parser_state->aml_end;
		status = AE_CTRL_TERMINATE;
		break;

	case AE_CTRL_BREAK:

		parser_state->aml = walk_state->aml_last_while;
		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_BREAK;
		break;

	case AE_CTRL_CONTINUE:

		parser_state->aml = walk_state->aml_last_while;
		status = AE_CTRL_CONTINUE;
		break;

	case AE_CTRL_PENDING:

		parser_state->aml = walk_state->aml_last_while;
		break;

#if 0
	case AE_CTRL_SKIP:

		parser_state->aml = parser_state->scope->parse_scope.pkg_end;
		status = AE_OK;
		break;
#endif

	case AE_CTRL_TRUE:
		/*
		 * Predicate of an IF was true, and we are at the matching ELSE.
		 * Just close out this package
		 */
		parser_state->aml = acpi_ps_get_next_package_end(parser_state);
		status = AE_CTRL_PENDING;
		break;

	case AE_CTRL_FALSE:
		/*
		 * Either an IF/WHILE predicate was false or we encountered a BREAK
		 * opcode. In both cases, we do not execute the rest of the
		 * package; we simply close out the parent (finishing the walk of
		 * this branch of the tree) and continue execution at the parent
		 * level.
		 */
		parser_state->aml = parser_state->scope->parse_scope.pkg_end;

		/* In the case of a BREAK, just force a predicate (if any) to FALSE */

		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_END;
		break;

	case AE_CTRL_TRANSFER:

		/* A method call (invocation) -- transfer control */

		status = AE_CTRL_TRANSFER;
		walk_state->prev_op = op;
		walk_state->method_call_op = op;
		walk_state->method_call_node =
		    (op->common.value.arg)->common.node;

		/* Will the return value (if any) be used by the caller? */

		walk_state->return_used =
		    acpi_ds_is_result_used(op, walk_state);
		break;

	default:

		status = callback_status;
		if ((callback_status & AE_CODE_MASK) == AE_CODE_CONTROL) {
			status = AE_OK;
		}
		break;
	}

	return_ACPI_STATUS(status);
}
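
/*
 * Quick reference (editor's addition, derived from the switch above):
 *
 *   Callback status      Parser action                          Returned status
 *   ------------------   ------------------------------------   -----------------
 *   AE_CTRL_TERMINATE    jump to the end of the method AML      AE_CTRL_TERMINATE
 *   AE_CTRL_BREAK        resume at innermost While and force    AE_CTRL_BREAK
 *                        the loop predicate FALSE
 *   AE_CTRL_CONTINUE     resume at innermost While              AE_CTRL_CONTINUE
 *   AE_CTRL_PENDING      resume at innermost While              AE_CTRL_PENDING
 *   AE_CTRL_TRUE         skip to the end of this package        AE_CTRL_PENDING
 *   AE_CTRL_FALSE        skip to the end of the enclosing       AE_CTRL_END
 *                        package
 *   AE_CTRL_TRANSFER     record the method call target          AE_CTRL_TRANSFER
 *   other control code   no AML adjustment                      AE_OK
 */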

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_parse_aml
 *
 * PARAMETERS:  walk_state      - Current state
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Parse raw AML and return a tree of ops
 *
 ******************************************************************************/

acpi_status acpi_ps_parse_aml(struct acpi_walk_state *walk_state)
{
	acpi_status status;
	struct acpi_thread_state *thread;
	struct acpi_thread_state *prev_walk_list = acpi_gbl_current_walk_list;
	struct acpi_walk_state *previous_walk_state;

	ACPI_FUNCTION_TRACE(ps_parse_aml);

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Entered with WalkState=%p Aml=%p size=%X\n",
			  walk_state, walk_state->parser_state.aml,
			  walk_state->parser_state.aml_size));

	if (!walk_state->parser_state.aml) {
		return_ACPI_STATUS(AE_NULL_OBJECT);
	}

	/* Create and initialize a new thread state */

	thread = acpi_ut_create_thread_state();
	if (!thread) {
		if (walk_state->method_desc) {

			/* Executing a control method - additional cleanup */

			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		acpi_ds_delete_walk_state(walk_state);
		return_ACPI_STATUS(AE_NO_MEMORY);
	}

	walk_state->thread = thread;

	/*
	 * If executing a method, the starting sync_level is this method's
	 * sync_level
	 */
	if (walk_state->method_desc) {
		walk_state->thread->current_sync_level =
		    walk_state->method_desc->method.sync_level;
	}

	acpi_ds_push_walk_state(walk_state, thread);

	/*
	 * This global allows the AML debugger to get a handle to the currently
	 * executing control method.
	 */
	acpi_gbl_current_walk_list = thread;

	/*
	 * Execute the walk loop as long as there is a valid Walk State. This
	 * handles nested control method invocations without recursion.
	 */
	ACPI_DEBUG_PRINT((ACPI_DB_PARSE, "State=%p\n", walk_state));

	status = AE_OK;
	while (walk_state) {
		if (ACPI_SUCCESS(status)) {
			/*
			 * The parse_loop executes AML until the method terminates
			 * or calls another method.
			 */
			status = acpi_ps_parse_loop(walk_state);
		}

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "Completed one call to walk loop, %s State=%p\n",
				  acpi_format_exception(status), walk_state));

		if (status == AE_CTRL_TRANSFER) {
			/*
			 * A method call was detected.
			 * Transfer control to the called control method
			 */
			status =
			    acpi_ds_call_control_method(thread, walk_state,
							NULL);
			if (ACPI_FAILURE(status)) {
				status =
				    acpi_ds_method_error(status, walk_state);
			}

			/*
			 * If the transfer to the new method call worked, a new
			 * walk state was created -- get it
			 */
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		} else if (status == AE_CTRL_TERMINATE) {
			status = AE_OK;
		} else if ((status != AE_OK) && (walk_state->method_desc)) {

			/* Either the method parse or actual execution failed */

			ACPI_ERROR_METHOD("Method parse/execution failed",
					  walk_state->method_node, NULL,
					  status);

			/* Check for possible multi-thread reentrancy problem */

			if ((status == AE_ALREADY_EXISTS) &&
			    (!(walk_state->method_desc->method.info_flags &
			       ACPI_METHOD_SERIALIZED))) {
				/*
				 * Method is not serialized and tried to create an object
				 * twice. The probable cause is that the method cannot
				 * handle reentrancy. Mark as "pending serialized" now, and
				 * then mark "serialized" when the last thread exits.
				 */
				walk_state->method_desc->method.info_flags |=
				    ACPI_METHOD_SERIALIZED_PENDING;
			}
		}

		/* We are done with this walk, move on to the parent if any */

		walk_state = acpi_ds_pop_walk_state(thread);

		/* Reset the current scope to the beginning of scope stack */

		acpi_ds_scope_stack_clear(walk_state);

		/*
		 * If we just returned from the execution of a control method or if we
		 * encountered an error during the method parse phase, there's lots of
		 * cleanup to do
		 */
		if (((walk_state->parse_flags & ACPI_PARSE_MODE_MASK) ==
		     ACPI_PARSE_EXECUTE) || (ACPI_FAILURE(status))) {
			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		/* Delete this walk state and all linked control states */

		acpi_ps_cleanup_scope(&walk_state->parser_state);
		previous_walk_state = walk_state;

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "ReturnValue=%p, ImplicitValue=%p State=%p\n",
				  walk_state->return_desc,
				  walk_state->implicit_return_obj, walk_state));

		/* Check if we have restarted a preempted walk */

		walk_state = acpi_ds_get_current_walk_state(thread);
		if (walk_state) {
			if (ACPI_SUCCESS(status)) {
				/*
				 * There is another walk state, restart it.
				 * If the method return value is not used by the parent,
				 * the object is deleted
				 */
				if (!previous_walk_state->return_desc) {
					/*
					 * In slack mode execution, if there is no return value
					 * we should implicitly return zero (0) as a default value.
					 */
					if (acpi_gbl_enable_interpreter_slack &&
					    !previous_walk_state->
					    implicit_return_obj) {
						previous_walk_state->
						    implicit_return_obj =
						    acpi_ut_create_integer_object
						    ((u64) 0);
						if (!previous_walk_state->
						    implicit_return_obj) {
							return_ACPI_STATUS
							    (AE_NO_MEMORY);
						}
					}

					/* Restart the calling control method */

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->
					     implicit_return_obj);
				} else {
					/*
					 * We have a valid return value, delete any implicit
					 * return value.
					 */
					acpi_ds_clear_implicit_return
					    (previous_walk_state);

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->return_desc);
				}
				if (ACPI_SUCCESS(status)) {
					walk_state->walk_type |=
					    ACPI_WALK_METHOD_RESTART;
				}
			} else {
				/* On error, delete any return object or implicit return */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
				acpi_ds_clear_implicit_return
				    (previous_walk_state);
			}
		}

		/*
		 * Just completed a 1st-level method, save the final internal return
		 * value (if any)
		 */
		else if (previous_walk_state->caller_return_desc) {
			if (previous_walk_state->implicit_return_obj) {
				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->implicit_return_obj;
			} else {
				/* NULL if no return value */

				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->return_desc;
			}
		} else {
			if (previous_walk_state->return_desc) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
			}
			if (previous_walk_state->implicit_return_obj) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 implicit_return_obj);
			}
		}

		acpi_ds_delete_walk_state(previous_walk_state);
	}

	/* Normal exit */

	acpi_ex_release_all_mutexes(thread);
	acpi_ut_delete_generic_state(ACPI_CAST_PTR
				     (union acpi_generic_state, thread));
	acpi_gbl_current_walk_list = prev_walk_list;
	return_ACPI_STATUS(status);
}