// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2023 Inochi Amaoto <inochiama@outlook.com>
 */

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/gcd.h>
#include <linux/spinlock.h>

#include "clk-cv18xx-ip.h"

/* GATE */
static inline struct cv1800_clk_gate *hw_to_cv1800_clk_gate(struct clk_hw *hw)
{
        struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

        return container_of(common, struct cv1800_clk_gate, common);
}

static int gate_enable(struct clk_hw *hw)
{
        struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

        return cv1800_clk_setbit(&gate->common, &gate->gate);
}

static void gate_disable(struct clk_hw *hw)
{
        struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

        cv1800_clk_clearbit(&gate->common, &gate->gate);
}

static int gate_is_enabled(struct clk_hw *hw)
{
        struct cv1800_clk_gate *gate = hw_to_cv1800_clk_gate(hw);

        return cv1800_clk_checkbit(&gate->common, &gate->gate);
}

static unsigned long gate_recalc_rate(struct clk_hw *hw,
                                      unsigned long parent_rate)
{
        return parent_rate;
}

static int gate_determine_rate(struct clk_hw *hw,
                               struct clk_rate_request *req)
{
        req->rate = req->best_parent_rate;

        return 0;
}

static int gate_set_rate(struct clk_hw *hw, unsigned long rate,
                         unsigned long parent_rate)
{
        return 0;
}

const struct clk_ops cv1800_clk_gate_ops = {
        .disable = gate_disable,
        .enable = gate_enable,
        .is_enabled = gate_is_enabled,

        .recalc_rate = gate_recalc_rate,
        .determine_rate = gate_determine_rate,
        .set_rate = gate_set_rate,
};

/* DIV */
#define _DIV_EN_CLK_DIV_FACTOR_FIELD    BIT(3)

#define DIV_GET_EN_CLK_DIV_FACTOR(_reg) \
        FIELD_GET(_DIV_EN_CLK_DIV_FACTOR_FIELD, _reg)

#define DIV_SET_EN_DIV_FACTOR(_reg) \
        _CV1800_SET_FIELD(_reg, 1, _DIV_EN_CLK_DIV_FACTOR_FIELD)

static inline struct cv1800_clk_div *hw_to_cv1800_clk_div(struct clk_hw *hw)
{
        struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

        return container_of(common, struct cv1800_clk_div, common);
}

static int div_enable(struct clk_hw *hw)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

        return cv1800_clk_setbit(&div->common, &div->gate);
}

static void div_disable(struct clk_hw *hw)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

        cv1800_clk_clearbit(&div->common, &div->gate);
}

static int div_is_enabled(struct clk_hw *hw)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

        return cv1800_clk_checkbit(&div->common, &div->gate);
}
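
/*
 * Divider register field convention, as used by the helpers below (derived
 * from how struct cv1800_clk_regfield is consumed in this file):
 *
 *   - width == 0:   no programmable divider field; initval > 0 is a fixed
 *                   divide factor, otherwise the clock is treated as 1:1.
 *   - initval < 0:  no divider at all, the factor is always 1.
 *   - initval == 0: the divide factor always comes from the register field.
 *   - initval > 0:  initval is the hardware default factor; the register
 *                   field only takes effect once the EN_CLK_DIV_FACTOR bit
 *                   (bit 3) is set, which div_helper_set_rate() does
 *                   whenever it programs a new value.
 */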

static int div_helper_set_rate(struct cv1800_clk_common *common,
                               struct cv1800_clk_regfield *div,
                               unsigned long val)
{
        unsigned long flags;
        u32 reg;

        if (div->width == 0)
                return 0;

        spin_lock_irqsave(common->lock, flags);

        reg = readl(common->base + div->reg);
        reg = cv1800_clk_regfield_set(reg, val, div);
        if (div->initval > 0)
                reg = DIV_SET_EN_DIV_FACTOR(reg);

        writel(reg, common->base + div->reg);

        spin_unlock_irqrestore(common->lock, flags);

        return 0;
}

static u32 div_helper_get_clockdiv(struct cv1800_clk_common *common,
                                   struct cv1800_clk_regfield *div)
{
        u32 clockdiv = 1;
        u32 reg;

        if (!div || div->initval < 0 || (div->width == 0 && div->initval <= 0))
                return 1;

        if (div->width == 0 && div->initval > 0)
                return div->initval;

        reg = readl(common->base + div->reg);

        if (div->initval == 0 || DIV_GET_EN_CLK_DIV_FACTOR(reg))
                clockdiv = cv1800_clk_regfield_get(reg, div);
        else if (div->initval > 0)
                clockdiv = div->initval;

        return clockdiv;
}

static u32 div_helper_round_rate(struct cv1800_clk_regfield *div,
                                 struct clk_hw *hw, struct clk_hw *parent,
                                 unsigned long rate, unsigned long *prate)
{
        if (div->width == 0) {
                if (div->initval <= 0)
                        return DIV_ROUND_UP_ULL(*prate, 1);
                else
                        return DIV_ROUND_UP_ULL(*prate, div->initval);
        }

        return divider_round_rate_parent(hw, parent, rate, prate, NULL,
                                         div->width, div->flags);
}

static long div_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
                           unsigned long rate, int id, void *data)
{
        struct cv1800_clk_div *div = data;

        return div_helper_round_rate(&div->div, &div->common.hw, parent,
                                     rate, parent_rate);
}

static bool div_is_better_rate(struct cv1800_clk_common *common,
                               unsigned long target, unsigned long now,
                               unsigned long best)
{
        if (common->features & CLK_DIVIDER_ROUND_CLOSEST)
                return abs_diff(target, now) < abs_diff(target, best);

        return now <= target && now > best;
}
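
/*
 * Shared determine_rate helper for all mux-like clocks in this file.  It
 * walks the candidate parents, asks the type-specific round() callback what
 * rate each parent could deliver (id == -1 means "the current parent,
 * reparenting is not allowed"), and keeps the parent whose result matches
 * the request exactly or is otherwise best according to div_is_better_rate()
 * (closest rate with CLK_DIVIDER_ROUND_CLOSEST, otherwise the highest rate
 * not above the request).
 */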

static int mux_helper_determine_rate(struct cv1800_clk_common *common,
                                     struct clk_rate_request *req,
                                     long (*round)(struct clk_hw *,
                                                   unsigned long *,
                                                   unsigned long,
                                                   int,
                                                   void *),
                                     void *data)
{
        unsigned long best_parent_rate = 0, best_rate = 0;
        struct clk_hw *best_parent, *hw = &common->hw;
        unsigned int i;

        if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
                unsigned long adj_parent_rate;

                best_parent = clk_hw_get_parent(hw);
                best_parent_rate = clk_hw_get_rate(best_parent);
                /* round() may adjust the parent rate, so work on a copy */
                adj_parent_rate = best_parent_rate;

                best_rate = round(best_parent, &adj_parent_rate,
                                  req->rate, -1, data);

                goto find;
        }

        for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
                unsigned long tmp_rate, parent_rate;
                struct clk_hw *parent;

                parent = clk_hw_get_parent_by_index(hw, i);
                if (!parent)
                        continue;

                parent_rate = clk_hw_get_rate(parent);

                tmp_rate = round(parent, &parent_rate, req->rate, i, data);

                if (tmp_rate == req->rate) {
                        best_parent = parent;
                        best_parent_rate = parent_rate;
                        best_rate = tmp_rate;
                        goto find;
                }

                if (div_is_better_rate(common, req->rate,
                                       tmp_rate, best_rate)) {
                        best_parent = parent;
                        best_parent_rate = parent_rate;
                        best_rate = tmp_rate;
                }
        }

        if (best_rate == 0)
                return -EINVAL;

find:
        req->best_parent_hw = best_parent;
        req->best_parent_rate = best_parent_rate;
        req->rate = best_rate;
        return 0;
}

static int div_determine_rate(struct clk_hw *hw,
                              struct clk_rate_request *req)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

        return mux_helper_determine_rate(&div->common, req,
                                         div_round_rate, div);
}

static unsigned long div_recalc_rate(struct clk_hw *hw,
                                     unsigned long parent_rate)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);
        unsigned long val;

        val = div_helper_get_clockdiv(&div->common, &div->div);
        if (val == 0)
                return 0;

        return divider_recalc_rate(hw, parent_rate, val, NULL,
                                   div->div.flags, div->div.width);
}

static int div_set_rate(struct clk_hw *hw, unsigned long rate,
                        unsigned long parent_rate)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);
        unsigned long val;

        val = divider_get_val(rate, parent_rate, NULL,
                              div->div.width, div->div.flags);

        return div_helper_set_rate(&div->common, &div->div, val);
}

const struct clk_ops cv1800_clk_div_ops = {
        .disable = div_disable,
        .enable = div_enable,
        .is_enabled = div_is_enabled,

        .determine_rate = div_determine_rate,
        .recalc_rate = div_recalc_rate,
        .set_rate = div_set_rate,
};
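
/*
 * "Bypass" variants wrap a regular div/mux clock with an extra bypass bit.
 * By convention, parent index 0 is the bypass source (the clock runs at the
 * parent rate and the divider is left untouched), while indices >= 1 map to
 * the wrapped clock's own parents shifted up by one; the id == 0 / id - 1
 * handling and the "+ 1" in the bypass get_parent() callbacks implement
 * that mapping for both the bypass div and the bypass mux below.
 */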

static inline struct cv1800_clk_bypass_div *
hw_to_cv1800_clk_bypass_div(struct clk_hw *hw)
{
        struct cv1800_clk_div *div = hw_to_cv1800_clk_div(hw);

        return container_of(div, struct cv1800_clk_bypass_div, div);
}

static long bypass_div_round_rate(struct clk_hw *parent,
                                  unsigned long *parent_rate,
                                  unsigned long rate, int id, void *data)
{
        struct cv1800_clk_bypass_div *div = data;

        if (id == -1) {
                if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
                        return *parent_rate;
                else
                        return div_round_rate(parent, parent_rate, rate,
                                              -1, &div->div);
        }

        if (id == 0)
                return *parent_rate;

        return div_round_rate(parent, parent_rate, rate, id - 1, &div->div);
}

static int bypass_div_determine_rate(struct clk_hw *hw,
                                     struct clk_rate_request *req)
{
        struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

        return mux_helper_determine_rate(&div->div.common, req,
                                         bypass_div_round_rate, div);
}

static unsigned long bypass_div_recalc_rate(struct clk_hw *hw,
                                            unsigned long parent_rate)
{
        struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

        if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
                return parent_rate;

        return div_recalc_rate(hw, parent_rate);
}

static int bypass_div_set_rate(struct clk_hw *hw, unsigned long rate,
                               unsigned long parent_rate)
{
        struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

        if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
                return 0;

        return div_set_rate(hw, rate, parent_rate);
}

static u8 bypass_div_get_parent(struct clk_hw *hw)
{
        struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

        if (cv1800_clk_checkbit(&div->div.common, &div->bypass))
                return 0;

        return 1;
}

static int bypass_div_set_parent(struct clk_hw *hw, u8 index)
{
        struct cv1800_clk_bypass_div *div = hw_to_cv1800_clk_bypass_div(hw);

        if (index)
                return cv1800_clk_clearbit(&div->div.common, &div->bypass);

        return cv1800_clk_setbit(&div->div.common, &div->bypass);
}

const struct clk_ops cv1800_clk_bypass_div_ops = {
        .disable = div_disable,
        .enable = div_enable,
        .is_enabled = div_is_enabled,

        .determine_rate = bypass_div_determine_rate,
        .recalc_rate = bypass_div_recalc_rate,
        .set_rate = bypass_div_set_rate,

        .set_parent = bypass_div_set_parent,
        .get_parent = bypass_div_get_parent,
};

/* MUX */
static inline struct cv1800_clk_mux *hw_to_cv1800_clk_mux(struct clk_hw *hw)
{
        struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

        return container_of(common, struct cv1800_clk_mux, common);
}

static int mux_enable(struct clk_hw *hw)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

        return cv1800_clk_setbit(&mux->common, &mux->gate);
}

static void mux_disable(struct clk_hw *hw)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

        cv1800_clk_clearbit(&mux->common, &mux->gate);
}

static int mux_is_enabled(struct clk_hw *hw)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

        return cv1800_clk_checkbit(&mux->common, &mux->gate);
}

static long mux_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
                           unsigned long rate, int id, void *data)
{
        struct cv1800_clk_mux *mux = data;

        return div_helper_round_rate(&mux->div, &mux->common.hw, parent,
                                     rate, parent_rate);
}

static int mux_determine_rate(struct clk_hw *hw,
                              struct clk_rate_request *req)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

        return mux_helper_determine_rate(&mux->common, req,
                                         mux_round_rate, mux);
}

static unsigned long mux_recalc_rate(struct clk_hw *hw,
                                     unsigned long parent_rate)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
        unsigned long val;

        val = div_helper_get_clockdiv(&mux->common, &mux->div);
        if (val == 0)
                return 0;

        return divider_recalc_rate(hw, parent_rate, val, NULL,
                                   mux->div.flags, mux->div.width);
}

static int mux_set_rate(struct clk_hw *hw, unsigned long rate,
                        unsigned long parent_rate)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
        unsigned long val;

        val = divider_get_val(rate, parent_rate, NULL,
                              mux->div.width, mux->div.flags);

        return div_helper_set_rate(&mux->common, &mux->div, val);
}

static u8 mux_get_parent(struct clk_hw *hw)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
        u32 reg = readl(mux->common.base + mux->mux.reg);

        return cv1800_clk_regfield_get(reg, &mux->mux);
}

static int _mux_set_parent(struct cv1800_clk_mux *mux, u8 index)
{
        u32 reg;

        reg = readl(mux->common.base + mux->mux.reg);
        reg = cv1800_clk_regfield_set(reg, index, &mux->mux);
        writel(reg, mux->common.base + mux->mux.reg);

        return 0;
}

static int mux_set_parent(struct clk_hw *hw, u8 index)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);
        unsigned long flags;

        spin_lock_irqsave(mux->common.lock, flags);

        _mux_set_parent(mux, index);

        spin_unlock_irqrestore(mux->common.lock, flags);

        return 0;
}

const struct clk_ops cv1800_clk_mux_ops = {
        .disable = mux_disable,
        .enable = mux_enable,
        .is_enabled = mux_is_enabled,

        .determine_rate = mux_determine_rate,
        .recalc_rate = mux_recalc_rate,
        .set_rate = mux_set_rate,

        .set_parent = mux_set_parent,
        .get_parent = mux_get_parent,
};

static inline struct cv1800_clk_bypass_mux *
hw_to_cv1800_clk_bypass_mux(struct clk_hw *hw)
{
        struct cv1800_clk_mux *mux = hw_to_cv1800_clk_mux(hw);

        return container_of(mux, struct cv1800_clk_bypass_mux, mux);
}

static long bypass_mux_round_rate(struct clk_hw *parent,
                                  unsigned long *parent_rate,
                                  unsigned long rate, int id, void *data)
{
        struct cv1800_clk_bypass_mux *mux = data;

        if (id == -1) {
                if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
                        return *parent_rate;
                else
                        return mux_round_rate(parent, parent_rate, rate,
                                              -1, &mux->mux);
        }

        if (id == 0)
                return *parent_rate;

        return mux_round_rate(parent, parent_rate, rate, id - 1, &mux->mux);
}

static int bypass_mux_determine_rate(struct clk_hw *hw,
                                     struct clk_rate_request *req)
{
        struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

        return mux_helper_determine_rate(&mux->mux.common, req,
                                         bypass_mux_round_rate, mux);
}

static unsigned long bypass_mux_recalc_rate(struct clk_hw *hw,
                                            unsigned long parent_rate)
{
        struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

        if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
                return parent_rate;

        return mux_recalc_rate(hw, parent_rate);
}

static int bypass_mux_set_rate(struct clk_hw *hw, unsigned long rate,
                               unsigned long parent_rate)
{
        struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

        if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
                return 0;

        return mux_set_rate(hw, rate, parent_rate);
}

static u8 bypass_mux_get_parent(struct clk_hw *hw)
{
        struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

        if (cv1800_clk_checkbit(&mux->mux.common, &mux->bypass))
                return 0;

        return mux_get_parent(hw) + 1;
}

static int bypass_mux_set_parent(struct clk_hw *hw, u8 index)
{
        struct cv1800_clk_bypass_mux *mux = hw_to_cv1800_clk_bypass_mux(hw);

        if (index == 0)
                return cv1800_clk_setbit(&mux->mux.common, &mux->bypass);

        return cv1800_clk_clearbit(&mux->mux.common, &mux->bypass);
}

const struct clk_ops cv1800_clk_bypass_mux_ops = {
        .disable = mux_disable,
        .enable = mux_enable,
        .is_enabled = mux_is_enabled,

        .determine_rate = bypass_mux_determine_rate,
        .recalc_rate = bypass_mux_recalc_rate,
        .set_rate = bypass_mux_set_rate,

        .set_parent = bypass_mux_set_parent,
        .get_parent = bypass_mux_get_parent,
};
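
/*
 * MMUX ("multi mux") clocks have two alternative divider/mux register pairs
 * selected by the clk_sel bit (div[0]/mux[0] when the bit is set, otherwise
 * div[1]/mux[1]), plus a bypass bit.  As with the other bypass clocks,
 * parent index 0 is the bypass source.  parent2sel[] records which selector
 * (if any; -1 means the parent is only reachable through bypass) serves a
 * given framework parent index, and sel2parent[] maps a raw mux field value
 * back to a framework parent index.
 */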

/* MMUX */
static inline struct cv1800_clk_mmux *hw_to_cv1800_clk_mmux(struct clk_hw *hw)
{
        struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

        return container_of(common, struct cv1800_clk_mmux, common);
}

static u8 mmux_get_parent_id(struct cv1800_clk_mmux *mmux)
{
        struct clk_hw *hw = &mmux->common.hw;
        struct clk_hw *parent = clk_hw_get_parent(hw);
        unsigned int i;

        for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
                if (parent == clk_hw_get_parent_by_index(hw, i))
                        return i;
        }

        BUG();
}

static int mmux_enable(struct clk_hw *hw)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

        return cv1800_clk_setbit(&mmux->common, &mmux->gate);
}

static void mmux_disable(struct clk_hw *hw)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

        cv1800_clk_clearbit(&mmux->common, &mmux->gate);
}

static int mmux_is_enabled(struct clk_hw *hw)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

        return cv1800_clk_checkbit(&mmux->common, &mmux->gate);
}

static long mmux_round_rate(struct clk_hw *parent, unsigned long *parent_rate,
                            unsigned long rate, int id, void *data)
{
        struct cv1800_clk_mmux *mmux = data;
        s8 div_id;

        if (id == -1) {
                if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
                        return *parent_rate;

                id = mmux_get_parent_id(mmux);
        }

        div_id = mmux->parent2sel[id];

        if (div_id < 0)
                return *parent_rate;

        return div_helper_round_rate(&mmux->div[div_id],
                                     &mmux->common.hw, parent,
                                     rate, parent_rate);
}

static int mmux_determine_rate(struct clk_hw *hw,
                               struct clk_rate_request *req)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);

        return mux_helper_determine_rate(&mmux->common, req,
                                         mmux_round_rate, mmux);
}

static unsigned long mmux_recalc_rate(struct clk_hw *hw,
                                      unsigned long parent_rate)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
        unsigned long val;
        struct cv1800_clk_regfield *div;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
                return parent_rate;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
                div = &mmux->div[0];
        else
                div = &mmux->div[1];

        val = div_helper_get_clockdiv(&mmux->common, div);
        if (val == 0)
                return 0;

        return divider_recalc_rate(hw, parent_rate, val, NULL,
                                   div->flags, div->width);
}

static int mmux_set_rate(struct clk_hw *hw, unsigned long rate,
                         unsigned long parent_rate)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
        struct cv1800_clk_regfield *div;
        unsigned long val;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
                return parent_rate;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
                div = &mmux->div[0];
        else
                div = &mmux->div[1];

        val = divider_get_val(rate, parent_rate, NULL,
                              div->width, div->flags);

        return div_helper_set_rate(&mmux->common, div, val);
}

static u8 mmux_get_parent(struct clk_hw *hw)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
        struct cv1800_clk_regfield *mux;
        u32 reg;
        s8 clk_sel;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->bypass))
                return 0;

        if (cv1800_clk_checkbit(&mmux->common, &mmux->clk_sel))
                clk_sel = 0;
        else
                clk_sel = 1;
        mux = &mmux->mux[clk_sel];

        reg = readl(mmux->common.base + mux->reg);

        return mmux->sel2parent[clk_sel][cv1800_clk_regfield_get(reg, mux)];
}

static int mmux_set_parent(struct clk_hw *hw, u8 index)
{
        struct cv1800_clk_mmux *mmux = hw_to_cv1800_clk_mmux(hw);
        struct cv1800_clk_regfield *mux;
        unsigned long flags;
        u32 reg;
        s8 clk_sel = mmux->parent2sel[index];

        if (index == 0 || clk_sel == -1) {
                cv1800_clk_setbit(&mmux->common, &mmux->bypass);
                goto release;
        }

        cv1800_clk_clearbit(&mmux->common, &mmux->bypass);

        if (clk_sel)
                cv1800_clk_clearbit(&mmux->common, &mmux->clk_sel);
        else
                cv1800_clk_setbit(&mmux->common, &mmux->clk_sel);

        spin_lock_irqsave(mmux->common.lock, flags);

        mux = &mmux->mux[clk_sel];
        reg = readl(mmux->common.base + mux->reg);
        reg = cv1800_clk_regfield_set(reg, index, mux);

        writel(reg, mmux->common.base + mux->reg);

        spin_unlock_irqrestore(mmux->common.lock, flags);

release:
        return 0;
}

const struct clk_ops cv1800_clk_mmux_ops = {
        .disable = mmux_disable,
        .enable = mmux_enable,
        .is_enabled = mmux_is_enabled,

        .determine_rate = mmux_determine_rate,
        .recalc_rate = mmux_recalc_rate,
        .set_rate = mmux_set_rate,

        .set_parent = mmux_set_parent,
        .get_parent = mmux_get_parent,
};
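
/*
 * The audio clock is a fractional divider: with the divider enabled, the
 * output is parent_rate * N / (2 * M), which is what aclk_recalc_rate()
 * computes.  aclk_set_rate() derives M and N by reducing parent_rate / 2
 * and the requested rate by their greatest common divisor; for example
 * (illustrative numbers), requesting 24576000 Hz from a 400 MHz parent
 * yields M = 3125 and N = 384.
 */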

/* AUDIO CLK */
static inline struct cv1800_clk_audio *
hw_to_cv1800_clk_audio(struct clk_hw *hw)
{
        struct cv1800_clk_common *common = hw_to_cv1800_clk_common(hw);

        return container_of(common, struct cv1800_clk_audio, common);
}

static int aclk_enable(struct clk_hw *hw)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

        cv1800_clk_setbit(&aclk->common, &aclk->src_en);
        return cv1800_clk_setbit(&aclk->common, &aclk->output_en);
}

static void aclk_disable(struct clk_hw *hw)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

        cv1800_clk_clearbit(&aclk->common, &aclk->output_en);
        cv1800_clk_clearbit(&aclk->common, &aclk->src_en);
}

static int aclk_is_enabled(struct clk_hw *hw)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

        return cv1800_clk_checkbit(&aclk->common, &aclk->output_en);
}

static int aclk_determine_rate(struct clk_hw *hw,
                               struct clk_rate_request *req)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);

        req->rate = aclk->target_rate;

        return 0;
}

static unsigned long aclk_recalc_rate(struct clk_hw *hw,
                                      unsigned long parent_rate)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);
        u64 rate = parent_rate;
        u64 factor = 2;
        u32 regval;

        if (!cv1800_clk_checkbit(&aclk->common, &aclk->div_en))
                return 0;

        regval = readl(aclk->common.base + aclk->m.reg);
        factor *= cv1800_clk_regfield_get(regval, &aclk->m);

        regval = readl(aclk->common.base + aclk->n.reg);
        rate *= cv1800_clk_regfield_get(regval, &aclk->n);

        return DIV64_U64_ROUND_UP(rate, factor);
}

static void aclk_determine_mn(unsigned long parent_rate, unsigned long rate,
                              u32 *m, u32 *n)
{
        u32 tm = parent_rate / 2;
        u32 tn = rate;
        u32 tcommon = gcd(tm, tn);

        *m = tm / tcommon;
        *n = tn / tcommon;
}

static int aclk_set_rate(struct clk_hw *hw, unsigned long rate,
                         unsigned long parent_rate)
{
        struct cv1800_clk_audio *aclk = hw_to_cv1800_clk_audio(hw);
        unsigned long flags;
        u32 m, n;

        aclk_determine_mn(parent_rate, rate, &m, &n);

        spin_lock_irqsave(aclk->common.lock, flags);

        writel(m, aclk->common.base + aclk->m.reg);
        writel(n, aclk->common.base + aclk->n.reg);

        cv1800_clk_setbit(&aclk->common, &aclk->div_en);
        cv1800_clk_setbit(&aclk->common, &aclk->div_up);

        spin_unlock_irqrestore(aclk->common.lock, flags);

        return 0;
}

const struct clk_ops cv1800_clk_audio_ops = {
        .disable = aclk_disable,
        .enable = aclk_enable,
        .is_enabled = aclk_is_enabled,

        .determine_rate = aclk_determine_rate,
        .recalc_rate = aclk_recalc_rate,
        .set_rate = aclk_set_rate,
};
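
/*
 * Illustrative only (not part of this driver): a clock described by one of
 * the ops tables above is handed to the common clk framework like any other
 * clk_hw provider.  The names "clk_foo" and "osc" and the dev/gate variables
 * below are hypothetical; a minimal sketch might look like:
 *
 *	gate->common.hw.init = CLK_HW_INIT("clk_foo", "osc",
 *					   &cv1800_clk_gate_ops, 0);
 *	ret = devm_clk_hw_register(dev, &gate->common.hw);
 */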