xref: /linux/sound/pci/asihpi/hpifunc.c (revision 4705b2e8047221142af2ed5e37f54ac4c7f80a7d)
1 
2 #include "hpi_internal.h"
3 #include "hpimsginit.h"
4 
5 #include "hpidebug.h"
6 
7 struct hpi_handle {
8 	unsigned int obj_index:12;
9 	unsigned int obj_type:4;
10 	unsigned int adapter_index:14;
11 	unsigned int spare:1;
12 	unsigned int read_only:1;
13 };
14 
15 union handle_word {
16 	struct hpi_handle h;
17 	u32 w;
18 };
19 
20 u32 hpi_indexes_to_handle(const char c_object, const u16 adapter_index,
21 	const u16 object_index)
22 {
23 	union handle_word handle;
24 
25 	handle.h.adapter_index = adapter_index;
26 	handle.h.spare = 0;
27 	handle.h.read_only = 0;
28 	handle.h.obj_type = c_object;
29 	handle.h.obj_index = object_index;
30 	return handle.w;
31 }
32 
33 void hpi_handle_to_indexes(const u32 handle, u16 *pw_adapter_index,
34 	u16 *pw_object_index)
35 {
36 	union handle_word uhandle;
37 	uhandle.w = handle;
38 
39 	if (pw_adapter_index)
40 		*pw_adapter_index = (u16)uhandle.h.adapter_index;
41 	if (pw_object_index)
42 		*pw_object_index = (u16)uhandle.h.obj_index;
43 }
44 
45 char hpi_handle_object(const u32 handle)
46 {
47 	union handle_word uhandle;
48 	uhandle.w = handle;
49 	return (char)uhandle.h.obj_type;
50 }
51 
52 #define u32TOINDEX(h, i1) \
53 do {\
54 	if (h == 0) \
55 		return HPI_ERROR_INVALID_OBJ; \
56 	else \
57 		hpi_handle_to_indexes(h, i1, NULL); \
58 } while (0)
59 
60 #define u32TOINDEXES(h, i1, i2) \
61 do {\
62 	if (h == 0) \
63 		return HPI_ERROR_INVALID_OBJ; \
64 	else \
65 		hpi_handle_to_indexes(h, i1, i2);\
66 } while (0)
67 
68 void hpi_format_to_msg(struct hpi_msg_format *pMF,
69 	const struct hpi_format *pF)
70 {
71 	pMF->sample_rate = pF->sample_rate;
72 	pMF->bit_rate = pF->bit_rate;
73 	pMF->attributes = pF->attributes;
74 	pMF->channels = pF->channels;
75 	pMF->format = pF->format;
76 }
77 
78 static void hpi_msg_to_format(struct hpi_format *pF,
79 	struct hpi_msg_format *pMF)
80 {
81 	pF->sample_rate = pMF->sample_rate;
82 	pF->bit_rate = pMF->bit_rate;
83 	pF->attributes = pMF->attributes;
84 	pF->channels = pMF->channels;
85 	pF->format = pMF->format;
86 	pF->mode_legacy = 0;
87 	pF->unused = 0;
88 }
89 
90 void hpi_stream_response_to_legacy(struct hpi_stream_res *pSR)
91 {
92 	pSR->u.legacy_stream_info.auxiliary_data_available =
93 		pSR->u.stream_info.auxiliary_data_available;
94 	pSR->u.legacy_stream_info.state = pSR->u.stream_info.state;
95 }
96 
97 static struct hpi_hsubsys gh_subsys;
98 
99 struct hpi_hsubsys *hpi_subsys_create(void
100 	)
101 {
102 	struct hpi_message hm;
103 	struct hpi_response hr;
104 
105 	memset(&gh_subsys, 0, sizeof(struct hpi_hsubsys));
106 
107 	{
108 		hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
109 			HPI_SUBSYS_OPEN);
110 		hpi_send_recv(&hm, &hr);
111 
112 		if (hr.error == 0)
113 			return &gh_subsys;
114 
115 	}
116 	return NULL;
117 }
118 
119 void hpi_subsys_free(const struct hpi_hsubsys *ph_subsys)
120 {
121 	struct hpi_message hm;
122 	struct hpi_response hr;
123 
124 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
125 		HPI_SUBSYS_CLOSE);
126 	hpi_send_recv(&hm, &hr);
127 
128 }
129 
130 u16 hpi_subsys_get_version(const struct hpi_hsubsys *ph_subsys, u32 *pversion)
131 {
132 	struct hpi_message hm;
133 	struct hpi_response hr;
134 
135 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
136 		HPI_SUBSYS_GET_VERSION);
137 	hpi_send_recv(&hm, &hr);
138 	*pversion = hr.u.s.version;
139 	return hr.error;
140 }
141 
142 u16 hpi_subsys_get_version_ex(const struct hpi_hsubsys *ph_subsys,
143 	u32 *pversion_ex)
144 {
145 	struct hpi_message hm;
146 	struct hpi_response hr;
147 
148 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
149 		HPI_SUBSYS_GET_VERSION);
150 	hpi_send_recv(&hm, &hr);
151 	*pversion_ex = hr.u.s.data;
152 	return hr.error;
153 }
154 
155 u16 hpi_subsys_get_info(const struct hpi_hsubsys *ph_subsys, u32 *pversion,
156 	u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
157 {
158 	struct hpi_message hm;
159 	struct hpi_response hr;
160 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
161 		HPI_SUBSYS_GET_INFO);
162 
163 	hpi_send_recv(&hm, &hr);
164 
165 	*pversion = hr.u.s.version;
166 	if (list_length > HPI_MAX_ADAPTERS)
167 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
168 			HPI_MAX_ADAPTERS);
169 	else
170 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list, list_length);
171 	*pw_num_adapters = hr.u.s.num_adapters;
172 	return hr.error;
173 }
174 
175 u16 hpi_subsys_find_adapters(const struct hpi_hsubsys *ph_subsys,
176 	u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
177 {
178 	struct hpi_message hm;
179 	struct hpi_response hr;
180 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
181 		HPI_SUBSYS_FIND_ADAPTERS);
182 
183 	hpi_send_recv(&hm, &hr);
184 
185 	if (list_length > HPI_MAX_ADAPTERS) {
186 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
187 			HPI_MAX_ADAPTERS * sizeof(u16));
188 		memset(&aw_adapter_list[HPI_MAX_ADAPTERS], 0,
189 			(list_length - HPI_MAX_ADAPTERS) * sizeof(u16));
190 	} else
191 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
192 			list_length * sizeof(u16));
193 	*pw_num_adapters = hr.u.s.num_adapters;
194 
195 	return hr.error;
196 }
197 
198 u16 hpi_subsys_create_adapter(const struct hpi_hsubsys *ph_subsys,
199 	const struct hpi_resource *p_resource, u16 *pw_adapter_index)
200 {
201 	struct hpi_message hm;
202 	struct hpi_response hr;
203 
204 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
205 		HPI_SUBSYS_CREATE_ADAPTER);
206 	hm.u.s.resource = *p_resource;
207 
208 	hpi_send_recv(&hm, &hr);
209 
210 	*pw_adapter_index = hr.u.s.adapter_index;
211 	return hr.error;
212 }
213 
214 u16 hpi_subsys_delete_adapter(const struct hpi_hsubsys *ph_subsys,
215 	u16 adapter_index)
216 {
217 	struct hpi_message hm;
218 	struct hpi_response hr;
219 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
220 		HPI_SUBSYS_DELETE_ADAPTER);
221 	hm.adapter_index = adapter_index;
222 	hpi_send_recv(&hm, &hr);
223 	return hr.error;
224 }
225 
226 u16 hpi_subsys_get_num_adapters(const struct hpi_hsubsys *ph_subsys,
227 	int *pn_num_adapters)
228 {
229 	struct hpi_message hm;
230 	struct hpi_response hr;
231 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
232 		HPI_SUBSYS_GET_NUM_ADAPTERS);
233 	hpi_send_recv(&hm, &hr);
234 	*pn_num_adapters = (int)hr.u.s.num_adapters;
235 	return hr.error;
236 }
237 
238 u16 hpi_subsys_get_adapter(const struct hpi_hsubsys *ph_subsys, int iterator,
239 	u32 *padapter_index, u16 *pw_adapter_type)
240 {
241 	struct hpi_message hm;
242 	struct hpi_response hr;
243 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
244 		HPI_SUBSYS_GET_ADAPTER);
245 	hm.adapter_index = (u16)iterator;
246 	hpi_send_recv(&hm, &hr);
247 	*padapter_index = (int)hr.u.s.adapter_index;
248 	*pw_adapter_type = hr.u.s.aw_adapter_list[0];
249 	return hr.error;
250 }
251 
252 u16 hpi_subsys_set_host_network_interface(const struct hpi_hsubsys *ph_subsys,
253 	const char *sz_interface)
254 {
255 	struct hpi_message hm;
256 	struct hpi_response hr;
257 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
258 		HPI_SUBSYS_SET_NETWORK_INTERFACE);
259 	if (sz_interface == NULL)
260 		return HPI_ERROR_INVALID_RESOURCE;
261 	hm.u.s.resource.r.net_if = sz_interface;
262 	hpi_send_recv(&hm, &hr);
263 	return hr.error;
264 }
265 
266 u16 hpi_adapter_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
267 {
268 	struct hpi_message hm;
269 	struct hpi_response hr;
270 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
271 		HPI_ADAPTER_OPEN);
272 	hm.adapter_index = adapter_index;
273 
274 	hpi_send_recv(&hm, &hr);
275 
276 	return hr.error;
277 
278 }
279 
280 u16 hpi_adapter_close(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
281 {
282 	struct hpi_message hm;
283 	struct hpi_response hr;
284 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
285 		HPI_ADAPTER_CLOSE);
286 	hm.adapter_index = adapter_index;
287 
288 	hpi_send_recv(&hm, &hr);
289 
290 	return hr.error;
291 }
292 
293 u16 hpi_adapter_set_mode(const struct hpi_hsubsys *ph_subsys,
294 	u16 adapter_index, u32 adapter_mode)
295 {
296 	return hpi_adapter_set_mode_ex(ph_subsys, adapter_index, adapter_mode,
297 		HPI_ADAPTER_MODE_SET);
298 }
299 
300 u16 hpi_adapter_set_mode_ex(const struct hpi_hsubsys *ph_subsys,
301 	u16 adapter_index, u32 adapter_mode, u16 query_or_set)
302 {
303 	struct hpi_message hm;
304 	struct hpi_response hr;
305 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
306 		HPI_ADAPTER_SET_MODE);
307 	hm.adapter_index = adapter_index;
308 	hm.u.a.adapter_mode = adapter_mode;
309 	hm.u.a.assert_id = query_or_set;
310 	hpi_send_recv(&hm, &hr);
311 	return hr.error;
312 }
313 
314 u16 hpi_adapter_get_mode(const struct hpi_hsubsys *ph_subsys,
315 	u16 adapter_index, u32 *padapter_mode)
316 {
317 	struct hpi_message hm;
318 	struct hpi_response hr;
319 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
320 		HPI_ADAPTER_GET_MODE);
321 	hm.adapter_index = adapter_index;
322 	hpi_send_recv(&hm, &hr);
323 	if (padapter_mode)
324 		*padapter_mode = hr.u.a.serial_number;
325 	return hr.error;
326 }
327 
328 u16 hpi_adapter_get_info(const struct hpi_hsubsys *ph_subsys,
329 	u16 adapter_index, u16 *pw_num_outstreams, u16 *pw_num_instreams,
330 	u16 *pw_version, u32 *pserial_number, u16 *pw_adapter_type)
331 {
332 	struct hpi_message hm;
333 	struct hpi_response hr;
334 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
335 		HPI_ADAPTER_GET_INFO);
336 	hm.adapter_index = adapter_index;
337 
338 	hpi_send_recv(&hm, &hr);
339 
340 	*pw_adapter_type = hr.u.a.adapter_type;
341 	*pw_num_outstreams = hr.u.a.num_outstreams;
342 	*pw_num_instreams = hr.u.a.num_instreams;
343 	*pw_version = hr.u.a.version;
344 	*pserial_number = hr.u.a.serial_number;
345 	return hr.error;
346 }
347 
348 u16 hpi_adapter_get_module_by_index(const struct hpi_hsubsys *ph_subsys,
349 	u16 adapter_index, u16 module_index, u16 *pw_num_outputs,
350 	u16 *pw_num_inputs, u16 *pw_version, u32 *pserial_number,
351 	u16 *pw_module_type, u32 *ph_module)
352 {
353 	struct hpi_message hm;
354 	struct hpi_response hr;
355 
356 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
357 		HPI_ADAPTER_MODULE_INFO);
358 	hm.adapter_index = adapter_index;
359 	hm.u.ax.module_info.index = module_index;
360 
361 	hpi_send_recv(&hm, &hr);
362 
363 	*pw_module_type = hr.u.a.adapter_type;
364 	*pw_num_outputs = hr.u.a.num_outstreams;
365 	*pw_num_inputs = hr.u.a.num_instreams;
366 	*pw_version = hr.u.a.version;
367 	*pserial_number = hr.u.a.serial_number;
368 	*ph_module = 0;
369 
370 	return hr.error;
371 }
372 
373 u16 hpi_adapter_get_assert(const struct hpi_hsubsys *ph_subsys,
374 	u16 adapter_index, u16 *assert_present, char *psz_assert,
375 	u16 *pw_line_number)
376 {
377 	struct hpi_message hm;
378 	struct hpi_response hr;
379 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
380 		HPI_ADAPTER_GET_ASSERT);
381 	hm.adapter_index = adapter_index;
382 	hpi_send_recv(&hm, &hr);
383 
384 	*assert_present = 0;
385 
386 	if (!hr.error) {
387 
388 		*pw_line_number = (u16)hr.u.a.serial_number;
389 		if (*pw_line_number) {
390 
391 			int i;
392 			char *src = (char *)hr.u.a.sz_adapter_assert;
393 			char *dst = psz_assert;
394 
395 			*assert_present = 1;
396 
397 			for (i = 0; i < HPI_STRING_LEN; i++) {
398 				char c;
399 				c = *src++;
400 				*dst++ = c;
401 				if (c == 0)
402 					break;
403 			}
404 
405 		}
406 	}
407 	return hr.error;
408 }
409 
410 u16 hpi_adapter_get_assert_ex(const struct hpi_hsubsys *ph_subsys,
411 	u16 adapter_index, u16 *assert_present, char *psz_assert,
412 	u32 *pline_number, u16 *pw_assert_on_dsp)
413 {
414 	struct hpi_message hm;
415 	struct hpi_response hr;
416 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
417 		HPI_ADAPTER_GET_ASSERT);
418 	hm.adapter_index = adapter_index;
419 
420 	hpi_send_recv(&hm, &hr);
421 
422 	*assert_present = 0;
423 
424 	if (!hr.error) {
425 
426 		*pline_number = hr.u.a.serial_number;
427 
428 		*assert_present = hr.u.a.adapter_type;
429 
430 		*pw_assert_on_dsp = hr.u.a.adapter_index;
431 
432 		if (!*assert_present && *pline_number)
433 
434 			*assert_present = 1;
435 
436 		if (*assert_present) {
437 
438 			int i;
439 			char *src = (char *)hr.u.a.sz_adapter_assert;
440 			char *dst = psz_assert;
441 
442 			for (i = 0; i < HPI_STRING_LEN; i++) {
443 				char c;
444 				c = *src++;
445 				*dst++ = c;
446 				if (c == 0)
447 					break;
448 			}
449 
450 		} else {
451 			*psz_assert = 0;
452 		}
453 	}
454 	return hr.error;
455 }
456 
457 u16 hpi_adapter_test_assert(const struct hpi_hsubsys *ph_subsys,
458 	u16 adapter_index, u16 assert_id)
459 {
460 	struct hpi_message hm;
461 	struct hpi_response hr;
462 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
463 		HPI_ADAPTER_TEST_ASSERT);
464 	hm.adapter_index = adapter_index;
465 	hm.u.a.assert_id = assert_id;
466 
467 	hpi_send_recv(&hm, &hr);
468 
469 	return hr.error;
470 }
471 
472 u16 hpi_adapter_enable_capability(const struct hpi_hsubsys *ph_subsys,
473 	u16 adapter_index, u16 capability, u32 key)
474 {
475 	struct hpi_message hm;
476 	struct hpi_response hr;
477 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
478 		HPI_ADAPTER_ENABLE_CAPABILITY);
479 	hm.adapter_index = adapter_index;
480 	hm.u.a.assert_id = capability;
481 	hm.u.a.adapter_mode = key;
482 
483 	hpi_send_recv(&hm, &hr);
484 
485 	return hr.error;
486 }
487 
488 u16 hpi_adapter_self_test(const struct hpi_hsubsys *ph_subsys,
489 	u16 adapter_index)
490 {
491 	struct hpi_message hm;
492 	struct hpi_response hr;
493 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
494 		HPI_ADAPTER_SELFTEST);
495 	hm.adapter_index = adapter_index;
496 	hpi_send_recv(&hm, &hr);
497 	return hr.error;
498 }
499 
500 u16 hpi_adapter_debug_read(const struct hpi_hsubsys *ph_subsys,
501 	u16 adapter_index, u32 dsp_address, char *p_buffer, int *count_bytes)
502 {
503 	struct hpi_message hm;
504 	struct hpi_response hr;
505 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
506 		HPI_ADAPTER_DEBUG_READ);
507 
508 	hr.size = sizeof(hr);
509 
510 	hm.adapter_index = adapter_index;
511 	hm.u.ax.debug_read.dsp_address = dsp_address;
512 
513 	if (*count_bytes > sizeof(hr.u.bytes))
514 		*count_bytes = sizeof(hr.u.bytes);
515 
516 	hm.u.ax.debug_read.count_bytes = *count_bytes;
517 
518 	hpi_send_recv(&hm, &hr);
519 
520 	if (!hr.error) {
521 		*count_bytes = hr.size - 12;
522 		memcpy(p_buffer, &hr.u.bytes, *count_bytes);
523 	} else
524 		*count_bytes = 0;
525 	return hr.error;
526 }
527 
528 u16 hpi_adapter_set_property(const struct hpi_hsubsys *ph_subsys,
529 	u16 adapter_index, u16 property, u16 parameter1, u16 parameter2)
530 {
531 	struct hpi_message hm;
532 	struct hpi_response hr;
533 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
534 		HPI_ADAPTER_SET_PROPERTY);
535 	hm.adapter_index = adapter_index;
536 	hm.u.ax.property_set.property = property;
537 	hm.u.ax.property_set.parameter1 = parameter1;
538 	hm.u.ax.property_set.parameter2 = parameter2;
539 
540 	hpi_send_recv(&hm, &hr);
541 
542 	return hr.error;
543 }
544 
545 u16 hpi_adapter_get_property(const struct hpi_hsubsys *ph_subsys,
546 	u16 adapter_index, u16 property, u16 *pw_parameter1,
547 	u16 *pw_parameter2)
548 {
549 	struct hpi_message hm;
550 	struct hpi_response hr;
551 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
552 		HPI_ADAPTER_GET_PROPERTY);
553 	hm.adapter_index = adapter_index;
554 	hm.u.ax.property_set.property = property;
555 
556 	hpi_send_recv(&hm, &hr);
557 	if (!hr.error) {
558 		if (pw_parameter1)
559 			*pw_parameter1 = hr.u.ax.property_get.parameter1;
560 		if (pw_parameter2)
561 			*pw_parameter2 = hr.u.ax.property_get.parameter2;
562 	}
563 
564 	return hr.error;
565 }
566 
567 u16 hpi_adapter_enumerate_property(const struct hpi_hsubsys *ph_subsys,
568 	u16 adapter_index, u16 index, u16 what_to_enumerate,
569 	u16 property_index, u32 *psetting)
570 {
571 	return 0;
572 }
573 
574 u16 hpi_format_create(struct hpi_format *p_format, u16 channels, u16 format,
575 	u32 sample_rate, u32 bit_rate, u32 attributes)
576 {
577 	u16 error = 0;
578 	struct hpi_msg_format fmt;
579 
580 	switch (channels) {
581 	case 1:
582 	case 2:
583 	case 4:
584 	case 6:
585 	case 8:
586 	case 16:
587 		break;
588 	default:
589 		error = HPI_ERROR_INVALID_CHANNELS;
590 		return error;
591 	}
592 	fmt.channels = channels;
593 
594 	switch (format) {
595 	case HPI_FORMAT_PCM16_SIGNED:
596 	case HPI_FORMAT_PCM24_SIGNED:
597 	case HPI_FORMAT_PCM32_SIGNED:
598 	case HPI_FORMAT_PCM32_FLOAT:
599 	case HPI_FORMAT_PCM16_BIGENDIAN:
600 	case HPI_FORMAT_PCM8_UNSIGNED:
601 	case HPI_FORMAT_MPEG_L1:
602 	case HPI_FORMAT_MPEG_L2:
603 	case HPI_FORMAT_MPEG_L3:
604 	case HPI_FORMAT_DOLBY_AC2:
605 	case HPI_FORMAT_AA_TAGIT1_HITS:
606 	case HPI_FORMAT_AA_TAGIT1_INSERTS:
607 	case HPI_FORMAT_RAW_BITSTREAM:
608 	case HPI_FORMAT_AA_TAGIT1_HITS_EX1:
609 	case HPI_FORMAT_OEM1:
610 	case HPI_FORMAT_OEM2:
611 		break;
612 	default:
613 		error = HPI_ERROR_INVALID_FORMAT;
614 		return error;
615 	}
616 	fmt.format = format;
617 
618 	if (sample_rate < 8000L) {
619 		error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
620 		sample_rate = 8000L;
621 	}
622 	if (sample_rate > 200000L) {
623 		error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
624 		sample_rate = 200000L;
625 	}
626 	fmt.sample_rate = sample_rate;
627 
628 	switch (format) {
629 	case HPI_FORMAT_MPEG_L1:
630 	case HPI_FORMAT_MPEG_L2:
631 	case HPI_FORMAT_MPEG_L3:
632 		fmt.bit_rate = bit_rate;
633 		break;
634 	case HPI_FORMAT_PCM16_SIGNED:
635 	case HPI_FORMAT_PCM16_BIGENDIAN:
636 		fmt.bit_rate = channels * sample_rate * 2;
637 		break;
638 	case HPI_FORMAT_PCM32_SIGNED:
639 	case HPI_FORMAT_PCM32_FLOAT:
640 		fmt.bit_rate = channels * sample_rate * 4;
641 		break;
642 	case HPI_FORMAT_PCM8_UNSIGNED:
643 		fmt.bit_rate = channels * sample_rate;
644 		break;
645 	default:
646 		fmt.bit_rate = 0;
647 	}
648 
649 	switch (format) {
650 	case HPI_FORMAT_MPEG_L2:
651 		if ((channels == 1)
652 			&& (attributes != HPI_MPEG_MODE_DEFAULT)) {
653 			attributes = HPI_MPEG_MODE_DEFAULT;
654 			error = HPI_ERROR_INVALID_FORMAT;
655 		} else if (attributes > HPI_MPEG_MODE_DUALCHANNEL) {
656 			attributes = HPI_MPEG_MODE_DEFAULT;
657 			error = HPI_ERROR_INVALID_FORMAT;
658 		}
659 		fmt.attributes = attributes;
660 		break;
661 	default:
662 		fmt.attributes = attributes;
663 	}
664 
665 	hpi_msg_to_format(p_format, &fmt);
666 	return error;
667 }
668 
669 u16 hpi_stream_estimate_buffer_size(struct hpi_format *p_format,
670 	u32 host_polling_rate_in_milli_seconds, u32 *recommended_buffer_size)
671 {
672 
673 	u32 bytes_per_second;
674 	u32 size;
675 	u16 channels;
676 	struct hpi_format *pF = p_format;
677 
678 	channels = pF->channels;
679 
680 	switch (pF->format) {
681 	case HPI_FORMAT_PCM16_BIGENDIAN:
682 	case HPI_FORMAT_PCM16_SIGNED:
683 		bytes_per_second = pF->sample_rate * 2L * channels;
684 		break;
685 	case HPI_FORMAT_PCM24_SIGNED:
686 		bytes_per_second = pF->sample_rate * 3L * channels;
687 		break;
688 	case HPI_FORMAT_PCM32_SIGNED:
689 	case HPI_FORMAT_PCM32_FLOAT:
690 		bytes_per_second = pF->sample_rate * 4L * channels;
691 		break;
692 	case HPI_FORMAT_PCM8_UNSIGNED:
693 		bytes_per_second = pF->sample_rate * 1L * channels;
694 		break;
695 	case HPI_FORMAT_MPEG_L1:
696 	case HPI_FORMAT_MPEG_L2:
697 	case HPI_FORMAT_MPEG_L3:
698 		bytes_per_second = pF->bit_rate / 8L;
699 		break;
700 	case HPI_FORMAT_DOLBY_AC2:
701 
702 		bytes_per_second = 256000L / 8L;
703 		break;
704 	default:
705 		return HPI_ERROR_INVALID_FORMAT;
706 	}
707 	size = (bytes_per_second * host_polling_rate_in_milli_seconds * 2) /
708 		1000L;
709 
710 	*recommended_buffer_size =
711 		roundup_pow_of_two(((size + 4095L) & ~4095L));
712 	return 0;
713 }
714 
715 u16 hpi_outstream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
716 	u16 outstream_index, u32 *ph_outstream)
717 {
718 	struct hpi_message hm;
719 	struct hpi_response hr;
720 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
721 		HPI_OSTREAM_OPEN);
722 	hm.adapter_index = adapter_index;
723 	hm.obj_index = outstream_index;
724 
725 	hpi_send_recv(&hm, &hr);
726 
727 	if (hr.error == 0)
728 		*ph_outstream =
729 			hpi_indexes_to_handle(HPI_OBJ_OSTREAM, adapter_index,
730 			outstream_index);
731 	else
732 		*ph_outstream = 0;
733 	return hr.error;
734 }
735 
736 u16 hpi_outstream_close(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
737 {
738 	struct hpi_message hm;
739 	struct hpi_response hr;
740 
741 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
742 		HPI_OSTREAM_HOSTBUFFER_FREE);
743 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
744 	hpi_send_recv(&hm, &hr);
745 
746 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
747 		HPI_OSTREAM_GROUP_RESET);
748 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
749 	hpi_send_recv(&hm, &hr);
750 
751 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
752 		HPI_OSTREAM_CLOSE);
753 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
754 	hpi_send_recv(&hm, &hr);
755 
756 	return hr.error;
757 }
758 
759 u16 hpi_outstream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
760 	u32 h_outstream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_to_play,
761 	u32 *psamples_played, u32 *pauxiliary_data_to_play)
762 {
763 	struct hpi_message hm;
764 	struct hpi_response hr;
765 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
766 		HPI_OSTREAM_GET_INFO);
767 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
768 
769 	hpi_send_recv(&hm, &hr);
770 
771 	if (pw_state)
772 		*pw_state = hr.u.d.u.stream_info.state;
773 	if (pbuffer_size)
774 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
775 	if (pdata_to_play)
776 		*pdata_to_play = hr.u.d.u.stream_info.data_available;
777 	if (psamples_played)
778 		*psamples_played = hr.u.d.u.stream_info.samples_transferred;
779 	if (pauxiliary_data_to_play)
780 		*pauxiliary_data_to_play =
781 			hr.u.d.u.stream_info.auxiliary_data_available;
782 	return hr.error;
783 }
784 
785 u16 hpi_outstream_write_buf(const struct hpi_hsubsys *ph_subsys,
786 	u32 h_outstream, const u8 *pb_data, u32 bytes_to_write,
787 	const struct hpi_format *p_format)
788 {
789 	struct hpi_message hm;
790 	struct hpi_response hr;
791 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
792 		HPI_OSTREAM_WRITE);
793 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
794 	hm.u.d.u.data.pb_data = (u8 *)pb_data;
795 	hm.u.d.u.data.data_size = bytes_to_write;
796 
797 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
798 
799 	hpi_send_recv(&hm, &hr);
800 
801 	return hr.error;
802 }
803 
804 u16 hpi_outstream_start(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
805 {
806 	struct hpi_message hm;
807 	struct hpi_response hr;
808 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
809 		HPI_OSTREAM_START);
810 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
811 
812 	hpi_send_recv(&hm, &hr);
813 
814 	return hr.error;
815 }
816 
817 u16 hpi_outstream_wait_start(const struct hpi_hsubsys *ph_subsys,
818 	u32 h_outstream)
819 {
820 	struct hpi_message hm;
821 	struct hpi_response hr;
822 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
823 		HPI_OSTREAM_WAIT_START);
824 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
825 
826 	hpi_send_recv(&hm, &hr);
827 
828 	return hr.error;
829 }
830 
831 u16 hpi_outstream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
832 {
833 	struct hpi_message hm;
834 	struct hpi_response hr;
835 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
836 		HPI_OSTREAM_STOP);
837 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
838 
839 	hpi_send_recv(&hm, &hr);
840 
841 	return hr.error;
842 }
843 
844 u16 hpi_outstream_sinegen(const struct hpi_hsubsys *ph_subsys,
845 	u32 h_outstream)
846 {
847 	struct hpi_message hm;
848 	struct hpi_response hr;
849 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
850 		HPI_OSTREAM_SINEGEN);
851 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
852 
853 	hpi_send_recv(&hm, &hr);
854 
855 	return hr.error;
856 }
857 
858 u16 hpi_outstream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
859 {
860 	struct hpi_message hm;
861 	struct hpi_response hr;
862 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
863 		HPI_OSTREAM_RESET);
864 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
865 
866 	hpi_send_recv(&hm, &hr);
867 
868 	return hr.error;
869 }
870 
871 u16 hpi_outstream_query_format(const struct hpi_hsubsys *ph_subsys,
872 	u32 h_outstream, struct hpi_format *p_format)
873 {
874 	struct hpi_message hm;
875 	struct hpi_response hr;
876 
877 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
878 		HPI_OSTREAM_QUERY_FORMAT);
879 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
880 
881 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
882 
883 	hpi_send_recv(&hm, &hr);
884 
885 	return hr.error;
886 }
887 
888 u16 hpi_outstream_set_format(const struct hpi_hsubsys *ph_subsys,
889 	u32 h_outstream, struct hpi_format *p_format)
890 {
891 	struct hpi_message hm;
892 	struct hpi_response hr;
893 
894 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
895 		HPI_OSTREAM_SET_FORMAT);
896 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
897 
898 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
899 
900 	hpi_send_recv(&hm, &hr);
901 
902 	return hr.error;
903 }
904 
905 u16 hpi_outstream_set_velocity(const struct hpi_hsubsys *ph_subsys,
906 	u32 h_outstream, short velocity)
907 {
908 	struct hpi_message hm;
909 	struct hpi_response hr;
910 
911 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
912 		HPI_OSTREAM_SET_VELOCITY);
913 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
914 	hm.u.d.u.velocity = velocity;
915 
916 	hpi_send_recv(&hm, &hr);
917 
918 	return hr.error;
919 }
920 
921 u16 hpi_outstream_set_punch_in_out(const struct hpi_hsubsys *ph_subsys,
922 	u32 h_outstream, u32 punch_in_sample, u32 punch_out_sample)
923 {
924 	struct hpi_message hm;
925 	struct hpi_response hr;
926 
927 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
928 		HPI_OSTREAM_SET_PUNCHINOUT);
929 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
930 
931 	hm.u.d.u.pio.punch_in_sample = punch_in_sample;
932 	hm.u.d.u.pio.punch_out_sample = punch_out_sample;
933 
934 	hpi_send_recv(&hm, &hr);
935 
936 	return hr.error;
937 }
938 
939 u16 hpi_outstream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
940 	u32 h_outstream, u16 mode)
941 {
942 	struct hpi_message hm;
943 	struct hpi_response hr;
944 
945 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
946 		HPI_OSTREAM_ANC_RESET);
947 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
948 	hm.u.d.u.data.format.channels = mode;
949 	hpi_send_recv(&hm, &hr);
950 	return hr.error;
951 }
952 
953 u16 hpi_outstream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
954 	u32 h_outstream, u32 *pframes_available)
955 {
956 	struct hpi_message hm;
957 	struct hpi_response hr;
958 
959 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
960 		HPI_OSTREAM_ANC_GET_INFO);
961 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
962 	hpi_send_recv(&hm, &hr);
963 	if (hr.error == 0) {
964 		if (pframes_available)
965 			*pframes_available =
966 				hr.u.d.u.stream_info.data_available /
967 				sizeof(struct hpi_anc_frame);
968 	}
969 	return hr.error;
970 }
971 
972 u16 hpi_outstream_ancillary_read(const struct hpi_hsubsys *ph_subsys,
973 	u32 h_outstream, struct hpi_anc_frame *p_anc_frame_buffer,
974 	u32 anc_frame_buffer_size_in_bytes,
975 	u32 number_of_ancillary_frames_to_read)
976 {
977 	struct hpi_message hm;
978 	struct hpi_response hr;
979 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
980 		HPI_OSTREAM_ANC_READ);
981 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
982 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
983 	hm.u.d.u.data.data_size =
984 		number_of_ancillary_frames_to_read *
985 		sizeof(struct hpi_anc_frame);
986 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
987 		hpi_send_recv(&hm, &hr);
988 	else
989 		hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
990 	return hr.error;
991 }
992 
993 u16 hpi_outstream_set_time_scale(const struct hpi_hsubsys *ph_subsys,
994 	u32 h_outstream, u32 time_scale)
995 {
996 	struct hpi_message hm;
997 	struct hpi_response hr;
998 
999 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1000 		HPI_OSTREAM_SET_TIMESCALE);
1001 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1002 
1003 	hm.u.d.u.time_scale = time_scale;
1004 
1005 	hpi_send_recv(&hm, &hr);
1006 
1007 	return hr.error;
1008 }
1009 
1010 u16 hpi_outstream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1011 	u32 h_outstream, u32 size_in_bytes)
1012 {
1013 	struct hpi_message hm;
1014 	struct hpi_response hr;
1015 
1016 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1017 		HPI_OSTREAM_HOSTBUFFER_ALLOC);
1018 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1019 	hm.u.d.u.data.data_size = size_in_bytes;
1020 	hpi_send_recv(&hm, &hr);
1021 	return hr.error;
1022 }
1023 
1024 u16 hpi_outstream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1025 	u32 h_outstream, u8 **pp_buffer,
1026 	struct hpi_hostbuffer_status **pp_status)
1027 {
1028 	struct hpi_message hm;
1029 	struct hpi_response hr;
1030 
1031 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1032 		HPI_OSTREAM_HOSTBUFFER_GET_INFO);
1033 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1034 	hpi_send_recv(&hm, &hr);
1035 
1036 	if (hr.error == 0) {
1037 		if (pp_buffer)
1038 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1039 		if (pp_status)
1040 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
1041 	}
1042 	return hr.error;
1043 }
1044 
1045 u16 hpi_outstream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1046 	u32 h_outstream)
1047 {
1048 	struct hpi_message hm;
1049 	struct hpi_response hr;
1050 
1051 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1052 		HPI_OSTREAM_HOSTBUFFER_FREE);
1053 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1054 	hpi_send_recv(&hm, &hr);
1055 	return hr.error;
1056 }
1057 
1058 u16 hpi_outstream_group_add(const struct hpi_hsubsys *ph_subsys,
1059 	u32 h_outstream, u32 h_stream)
1060 {
1061 	struct hpi_message hm;
1062 	struct hpi_response hr;
1063 	u16 adapter;
1064 	char c_obj_type;
1065 
1066 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1067 		HPI_OSTREAM_GROUP_ADD);
1068 	hr.error = 0;
1069 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1070 	c_obj_type = hpi_handle_object(h_stream);
1071 	switch (c_obj_type) {
1072 	case HPI_OBJ_OSTREAM:
1073 		hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1074 		u32TOINDEXES(h_stream, &adapter,
1075 			&hm.u.d.u.stream.stream_index);
1076 		break;
1077 	case HPI_OBJ_ISTREAM:
1078 		hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1079 		u32TOINDEXES(h_stream, &adapter,
1080 			&hm.u.d.u.stream.stream_index);
1081 		break;
1082 	default:
1083 		return HPI_ERROR_INVALID_STREAM;
1084 	}
1085 	if (adapter != hm.adapter_index)
1086 		return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1087 
1088 	hpi_send_recv(&hm, &hr);
1089 	return hr.error;
1090 }
1091 
1092 u16 hpi_outstream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1093 	u32 h_outstream, u32 *poutstream_map, u32 *pinstream_map)
1094 {
1095 	struct hpi_message hm;
1096 	struct hpi_response hr;
1097 
1098 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1099 		HPI_OSTREAM_GROUP_GETMAP);
1100 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1101 	hpi_send_recv(&hm, &hr);
1102 
1103 	if (poutstream_map)
1104 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1105 	if (pinstream_map)
1106 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
1107 
1108 	return hr.error;
1109 }
1110 
1111 u16 hpi_outstream_group_reset(const struct hpi_hsubsys *ph_subsys,
1112 	u32 h_outstream)
1113 {
1114 	struct hpi_message hm;
1115 	struct hpi_response hr;
1116 
1117 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1118 		HPI_OSTREAM_GROUP_RESET);
1119 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1120 	hpi_send_recv(&hm, &hr);
1121 	return hr.error;
1122 }
1123 
1124 u16 hpi_instream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1125 	u16 instream_index, u32 *ph_instream)
1126 {
1127 	struct hpi_message hm;
1128 	struct hpi_response hr;
1129 
1130 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1131 		HPI_ISTREAM_OPEN);
1132 	hm.adapter_index = adapter_index;
1133 	hm.obj_index = instream_index;
1134 
1135 	hpi_send_recv(&hm, &hr);
1136 
1137 	if (hr.error == 0)
1138 		*ph_instream =
1139 			hpi_indexes_to_handle(HPI_OBJ_ISTREAM, adapter_index,
1140 			instream_index);
1141 	else
1142 		*ph_instream = 0;
1143 
1144 	return hr.error;
1145 }
1146 
1147 u16 hpi_instream_close(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1148 {
1149 	struct hpi_message hm;
1150 	struct hpi_response hr;
1151 
1152 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1153 		HPI_ISTREAM_HOSTBUFFER_FREE);
1154 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1155 	hpi_send_recv(&hm, &hr);
1156 
1157 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1158 		HPI_ISTREAM_GROUP_RESET);
1159 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1160 	hpi_send_recv(&hm, &hr);
1161 
1162 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1163 		HPI_ISTREAM_CLOSE);
1164 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1165 	hpi_send_recv(&hm, &hr);
1166 
1167 	return hr.error;
1168 }
1169 
1170 u16 hpi_instream_query_format(const struct hpi_hsubsys *ph_subsys,
1171 	u32 h_instream, const struct hpi_format *p_format)
1172 {
1173 	struct hpi_message hm;
1174 	struct hpi_response hr;
1175 
1176 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1177 		HPI_ISTREAM_QUERY_FORMAT);
1178 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1179 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1180 
1181 	hpi_send_recv(&hm, &hr);
1182 
1183 	return hr.error;
1184 }
1185 
1186 u16 hpi_instream_set_format(const struct hpi_hsubsys *ph_subsys,
1187 	u32 h_instream, const struct hpi_format *p_format)
1188 {
1189 	struct hpi_message hm;
1190 	struct hpi_response hr;
1191 
1192 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1193 		HPI_ISTREAM_SET_FORMAT);
1194 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1195 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1196 
1197 	hpi_send_recv(&hm, &hr);
1198 
1199 	return hr.error;
1200 }
1201 
1202 u16 hpi_instream_read_buf(const struct hpi_hsubsys *ph_subsys, u32 h_instream,
1203 	u8 *pb_data, u32 bytes_to_read)
1204 {
1205 	struct hpi_message hm;
1206 	struct hpi_response hr;
1207 
1208 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1209 		HPI_ISTREAM_READ);
1210 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1211 	hm.u.d.u.data.data_size = bytes_to_read;
1212 	hm.u.d.u.data.pb_data = pb_data;
1213 
1214 	hpi_send_recv(&hm, &hr);
1215 
1216 	return hr.error;
1217 }
1218 
1219 u16 hpi_instream_start(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1220 {
1221 	struct hpi_message hm;
1222 	struct hpi_response hr;
1223 
1224 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1225 		HPI_ISTREAM_START);
1226 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1227 
1228 	hpi_send_recv(&hm, &hr);
1229 
1230 	return hr.error;
1231 }
1232 
1233 u16 hpi_instream_wait_start(const struct hpi_hsubsys *ph_subsys,
1234 	u32 h_instream)
1235 {
1236 	struct hpi_message hm;
1237 	struct hpi_response hr;
1238 
1239 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1240 		HPI_ISTREAM_WAIT_START);
1241 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1242 
1243 	hpi_send_recv(&hm, &hr);
1244 
1245 	return hr.error;
1246 }
1247 
1248 u16 hpi_instream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1249 {
1250 	struct hpi_message hm;
1251 	struct hpi_response hr;
1252 
1253 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1254 		HPI_ISTREAM_STOP);
1255 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1256 
1257 	hpi_send_recv(&hm, &hr);
1258 
1259 	return hr.error;
1260 }
1261 
1262 u16 hpi_instream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1263 {
1264 	struct hpi_message hm;
1265 	struct hpi_response hr;
1266 
1267 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1268 		HPI_ISTREAM_RESET);
1269 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1270 
1271 	hpi_send_recv(&hm, &hr);
1272 
1273 	return hr.error;
1274 }
1275 
1276 u16 hpi_instream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
1277 	u32 h_instream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_recorded,
1278 	u32 *psamples_recorded, u32 *pauxiliary_data_recorded)
1279 {
1280 	struct hpi_message hm;
1281 	struct hpi_response hr;
1282 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1283 		HPI_ISTREAM_GET_INFO);
1284 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1285 
1286 	hpi_send_recv(&hm, &hr);
1287 
1288 	if (pw_state)
1289 		*pw_state = hr.u.d.u.stream_info.state;
1290 	if (pbuffer_size)
1291 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
1292 	if (pdata_recorded)
1293 		*pdata_recorded = hr.u.d.u.stream_info.data_available;
1294 	if (psamples_recorded)
1295 		*psamples_recorded = hr.u.d.u.stream_info.samples_transferred;
1296 	if (pauxiliary_data_recorded)
1297 		*pauxiliary_data_recorded =
1298 			hr.u.d.u.stream_info.auxiliary_data_available;
1299 	return hr.error;
1300 }
1301 
1302 u16 hpi_instream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
1303 	u32 h_instream, u16 bytes_per_frame, u16 mode, u16 alignment,
1304 	u16 idle_bit)
1305 {
1306 	struct hpi_message hm;
1307 	struct hpi_response hr;
1308 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1309 		HPI_ISTREAM_ANC_RESET);
1310 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1311 	hm.u.d.u.data.format.attributes = bytes_per_frame;
1312 	hm.u.d.u.data.format.format = (mode << 8) | (alignment & 0xff);
1313 	hm.u.d.u.data.format.channels = idle_bit;
1314 	hpi_send_recv(&hm, &hr);
1315 	return hr.error;
1316 }
1317 
1318 u16 hpi_instream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
1319 	u32 h_instream, u32 *pframe_space)
1320 {
1321 	struct hpi_message hm;
1322 	struct hpi_response hr;
1323 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1324 		HPI_ISTREAM_ANC_GET_INFO);
1325 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1326 	hpi_send_recv(&hm, &hr);
1327 	if (pframe_space)
1328 		*pframe_space =
1329 			(hr.u.d.u.stream_info.buffer_size -
1330 			hr.u.d.u.stream_info.data_available) /
1331 			sizeof(struct hpi_anc_frame);
1332 	return hr.error;
1333 }
1334 
1335 u16 hpi_instream_ancillary_write(const struct hpi_hsubsys *ph_subsys,
1336 	u32 h_instream, const struct hpi_anc_frame *p_anc_frame_buffer,
1337 	u32 anc_frame_buffer_size_in_bytes,
1338 	u32 number_of_ancillary_frames_to_write)
1339 {
1340 	struct hpi_message hm;
1341 	struct hpi_response hr;
1342 
1343 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1344 		HPI_ISTREAM_ANC_WRITE);
1345 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1346 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
1347 	hm.u.d.u.data.data_size =
1348 		number_of_ancillary_frames_to_write *
1349 		sizeof(struct hpi_anc_frame);
1350 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
1351 		hpi_send_recv(&hm, &hr);
1352 	else
1353 		hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
1354 	return hr.error;
1355 }
1356 
1357 u16 hpi_instream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1358 	u32 h_instream, u32 size_in_bytes)
1359 {
1360 
1361 	struct hpi_message hm;
1362 	struct hpi_response hr;
1363 
1364 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1365 		HPI_ISTREAM_HOSTBUFFER_ALLOC);
1366 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1367 	hm.u.d.u.data.data_size = size_in_bytes;
1368 	hpi_send_recv(&hm, &hr);
1369 	return hr.error;
1370 }
1371 
1372 u16 hpi_instream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1373 	u32 h_instream, u8 **pp_buffer,
1374 	struct hpi_hostbuffer_status **pp_status)
1375 {
1376 	struct hpi_message hm;
1377 	struct hpi_response hr;
1378 
1379 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1380 		HPI_ISTREAM_HOSTBUFFER_GET_INFO);
1381 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1382 	hpi_send_recv(&hm, &hr);
1383 
1384 	if (hr.error == 0) {
1385 		if (pp_buffer)
1386 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1387 		if (pp_status)
1388 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
1389 	}
1390 	return hr.error;
1391 }
1392 
1393 u16 hpi_instream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1394 	u32 h_instream)
1395 {
1396 
1397 	struct hpi_message hm;
1398 	struct hpi_response hr;
1399 
1400 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1401 		HPI_ISTREAM_HOSTBUFFER_FREE);
1402 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1403 	hpi_send_recv(&hm, &hr);
1404 	return hr.error;
1405 }
1406 
1407 u16 hpi_instream_group_add(const struct hpi_hsubsys *ph_subsys,
1408 	u32 h_instream, u32 h_stream)
1409 {
1410 	struct hpi_message hm;
1411 	struct hpi_response hr;
1412 	u16 adapter;
1413 	char c_obj_type;
1414 
1415 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1416 		HPI_ISTREAM_GROUP_ADD);
1417 	hr.error = 0;
1418 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1419 	c_obj_type = hpi_handle_object(h_stream);
1420 
1421 	switch (c_obj_type) {
1422 	case HPI_OBJ_OSTREAM:
1423 		hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1424 		u32TOINDEXES(h_stream, &adapter,
1425 			&hm.u.d.u.stream.stream_index);
1426 		break;
1427 	case HPI_OBJ_ISTREAM:
1428 		hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1429 		u32TOINDEXES(h_stream, &adapter,
1430 			&hm.u.d.u.stream.stream_index);
1431 		break;
1432 	default:
1433 		return HPI_ERROR_INVALID_STREAM;
1434 	}
1435 
1436 	if (adapter != hm.adapter_index)
1437 		return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1438 
1439 	hpi_send_recv(&hm, &hr);
1440 	return hr.error;
1441 }
1442 
1443 u16 hpi_instream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1444 	u32 h_instream, u32 *poutstream_map, u32 *pinstream_map)
1445 {
1446 	struct hpi_message hm;
1447 	struct hpi_response hr;
1448 
1449 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1450 		HPI_ISTREAM_HOSTBUFFER_FREE);
1451 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1452 	hpi_send_recv(&hm, &hr);
1453 
1454 	if (poutstream_map)
1455 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1456 	if (pinstream_map)
1457 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
1458 
1459 	return hr.error;
1460 }
1461 
1462 u16 hpi_instream_group_reset(const struct hpi_hsubsys *ph_subsys,
1463 	u32 h_instream)
1464 {
1465 	struct hpi_message hm;
1466 	struct hpi_response hr;
1467 
1468 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1469 		HPI_ISTREAM_GROUP_RESET);
1470 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1471 	hpi_send_recv(&hm, &hr);
1472 	return hr.error;
1473 }
1474 
1475 u16 hpi_mixer_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1476 	u32 *ph_mixer)
1477 {
1478 	struct hpi_message hm;
1479 	struct hpi_response hr;
1480 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_OPEN);
1481 	hm.adapter_index = adapter_index;
1482 
1483 	hpi_send_recv(&hm, &hr);
1484 
1485 	if (hr.error == 0)
1486 		*ph_mixer =
1487 			hpi_indexes_to_handle(HPI_OBJ_MIXER, adapter_index,
1488 			0);
1489 	else
1490 		*ph_mixer = 0;
1491 	return hr.error;
1492 }
1493 
1494 u16 hpi_mixer_close(const struct hpi_hsubsys *ph_subsys, u32 h_mixer)
1495 {
1496 	struct hpi_message hm;
1497 	struct hpi_response hr;
1498 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_CLOSE);
1499 	u32TOINDEX(h_mixer, &hm.adapter_index);
1500 	hpi_send_recv(&hm, &hr);
1501 	return hr.error;
1502 }
1503 
1504 u16 hpi_mixer_get_control(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1505 	u16 src_node_type, u16 src_node_type_index, u16 dst_node_type,
1506 	u16 dst_node_type_index, u16 control_type, u32 *ph_control)
1507 {
1508 	struct hpi_message hm;
1509 	struct hpi_response hr;
1510 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1511 		HPI_MIXER_GET_CONTROL);
1512 	u32TOINDEX(h_mixer, &hm.adapter_index);
1513 	hm.u.m.node_type1 = src_node_type;
1514 	hm.u.m.node_index1 = src_node_type_index;
1515 	hm.u.m.node_type2 = dst_node_type;
1516 	hm.u.m.node_index2 = dst_node_type_index;
1517 	hm.u.m.control_type = control_type;
1518 
1519 	hpi_send_recv(&hm, &hr);
1520 
1521 	if (hr.error == 0)
1522 		*ph_control =
1523 			hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1524 			hm.adapter_index, hr.u.m.control_index);
1525 	else
1526 		*ph_control = 0;
1527 	return hr.error;
1528 }
1529 
1530 u16 hpi_mixer_get_control_by_index(const struct hpi_hsubsys *ph_subsys,
1531 	u32 h_mixer, u16 control_index, u16 *pw_src_node_type,
1532 	u16 *pw_src_node_index, u16 *pw_dst_node_type, u16 *pw_dst_node_index,
1533 	u16 *pw_control_type, u32 *ph_control)
1534 {
1535 	struct hpi_message hm;
1536 	struct hpi_response hr;
1537 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1538 		HPI_MIXER_GET_CONTROL_BY_INDEX);
1539 	u32TOINDEX(h_mixer, &hm.adapter_index);
1540 	hm.u.m.control_index = control_index;
1541 	hpi_send_recv(&hm, &hr);
1542 
1543 	if (pw_src_node_type) {
1544 		*pw_src_node_type =
1545 			hr.u.m.src_node_type + HPI_SOURCENODE_NONE;
1546 		*pw_src_node_index = hr.u.m.src_node_index;
1547 		*pw_dst_node_type = hr.u.m.dst_node_type + HPI_DESTNODE_NONE;
1548 		*pw_dst_node_index = hr.u.m.dst_node_index;
1549 	}
1550 	if (pw_control_type)
1551 		*pw_control_type = hr.u.m.control_index;
1552 
1553 	if (ph_control) {
1554 		if (hr.error == 0)
1555 			*ph_control =
1556 				hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1557 				hm.adapter_index, control_index);
1558 		else
1559 			*ph_control = 0;
1560 	}
1561 	return hr.error;
1562 }
1563 
1564 u16 hpi_mixer_store(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1565 	enum HPI_MIXER_STORE_COMMAND command, u16 index)
1566 {
1567 	struct hpi_message hm;
1568 	struct hpi_response hr;
1569 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_STORE);
1570 	u32TOINDEX(h_mixer, &hm.adapter_index);
1571 	hm.u.mx.store.command = command;
1572 	hm.u.mx.store.index = index;
1573 	hpi_send_recv(&hm, &hr);
1574 	return hr.error;
1575 }
1576 
1577 static
1578 u16 hpi_control_param_set(const struct hpi_hsubsys *ph_subsys,
1579 	const u32 h_control, const u16 attrib, const u32 param1,
1580 	const u32 param2)
1581 {
1582 	struct hpi_message hm;
1583 	struct hpi_response hr;
1584 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1585 		HPI_CONTROL_SET_STATE);
1586 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1587 	hm.u.c.attribute = attrib;
1588 	hm.u.c.param1 = param1;
1589 	hm.u.c.param2 = param2;
1590 	hpi_send_recv(&hm, &hr);
1591 	return hr.error;
1592 }
1593 
1594 static
1595 u16 hpi_control_param_get(const struct hpi_hsubsys *ph_subsys,
1596 	const u32 h_control, const u16 attrib, u32 param1, u32 param2,
1597 	u32 *pparam1, u32 *pparam2)
1598 {
1599 	struct hpi_message hm;
1600 	struct hpi_response hr;
1601 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1602 		HPI_CONTROL_GET_STATE);
1603 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1604 	hm.u.c.attribute = attrib;
1605 	hm.u.c.param1 = param1;
1606 	hm.u.c.param2 = param2;
1607 	hpi_send_recv(&hm, &hr);
1608 	if (pparam1)
1609 		*pparam1 = hr.u.c.param1;
1610 	if (pparam2)
1611 		*pparam2 = hr.u.c.param2;
1612 
1613 	return hr.error;
1614 }
1615 
1616 #define hpi_control_param1_get(s, h, a, p1) \
1617 		hpi_control_param_get(s, h, a, 0, 0, p1, NULL)
1618 #define hpi_control_param2_get(s, h, a, p1, p2) \
1619 		hpi_control_param_get(s, h, a, 0, 0, p1, p2)
1620 #define hpi_control_ex_param1_get(s, h, a, p1) \
1621 		hpi_control_ex_param_get(s, h, a, 0, 0, p1, NULL)
1622 #define hpi_control_ex_param2_get(s, h, a, p1, p2) \
1623 		hpi_control_ex_param_get(s, h, a, 0, 0, p1, p2)
1624 
1625 static
1626 u16 hpi_control_query(const struct hpi_hsubsys *ph_subsys,
1627 	const u32 h_control, const u16 attrib, const u32 index,
1628 	const u32 param, u32 *psetting)
1629 {
1630 	struct hpi_message hm;
1631 	struct hpi_response hr;
1632 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1633 		HPI_CONTROL_GET_INFO);
1634 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1635 
1636 	hm.u.c.attribute = attrib;
1637 	hm.u.c.param1 = index;
1638 	hm.u.c.param2 = param;
1639 
1640 	hpi_send_recv(&hm, &hr);
1641 	*psetting = hr.u.c.param1;
1642 
1643 	return hr.error;
1644 }
1645 
1646 static u16 hpi_control_get_string(const struct hpi_hsubsys *ph_subsys,
1647 	const u32 h_control, const u16 attribute, char *psz_string,
1648 	const u32 string_length)
1649 {
1650 	unsigned int sub_string_index = 0, j = 0;
1651 	char c = 0;
1652 	unsigned int n = 0;
1653 	u16 hE = 0;
1654 
1655 	if ((string_length < 1) || (string_length > 256))
1656 		return HPI_ERROR_INVALID_CONTROL_VALUE;
1657 	for (sub_string_index = 0; sub_string_index < string_length;
1658 		sub_string_index += 8) {
1659 		struct hpi_message hm;
1660 		struct hpi_response hr;
1661 
1662 		hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1663 			HPI_CONTROL_GET_STATE);
1664 		u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1665 		hm.u.c.attribute = attribute;
1666 		hm.u.c.param1 = sub_string_index;
1667 		hm.u.c.param2 = 0;
1668 		hpi_send_recv(&hm, &hr);
1669 
1670 		if (sub_string_index == 0
1671 			&& (hr.u.cu.chars8.remaining_chars + 8) >
1672 			string_length)
1673 			return HPI_ERROR_INVALID_CONTROL_VALUE;
1674 
1675 		if (hr.error) {
1676 			hE = hr.error;
1677 			break;
1678 		}
1679 		for (j = 0; j < 8; j++) {
1680 			c = hr.u.cu.chars8.sz_data[j];
1681 			psz_string[sub_string_index + j] = c;
1682 			n++;
1683 			if (n >= string_length) {
1684 				psz_string[string_length - 1] = 0;
1685 				hE = HPI_ERROR_INVALID_CONTROL_VALUE;
1686 				break;
1687 			}
1688 			if (c == 0)
1689 				break;
1690 		}
1691 
1692 		if ((hr.u.cu.chars8.remaining_chars == 0)
1693 			&& ((sub_string_index + j) < string_length)
1694 			&& (c != 0)) {
1695 			c = 0;
1696 			psz_string[sub_string_index + j] = c;
1697 		}
1698 		if (c == 0)
1699 			break;
1700 	}
1701 	return hE;
1702 }
1703 
1704 u16 HPI_AESEBU__receiver_query_format(const struct hpi_hsubsys *ph_subsys,
1705 	const u32 h_aes_rx, const u32 index, u16 *pw_format)
1706 {
1707 	u32 qr;
1708 	u16 err;
1709 
1710 	err = hpi_control_query(ph_subsys, h_aes_rx, HPI_AESEBURX_FORMAT,
1711 		index, 0, &qr);
1712 	*pw_format = (u16)qr;
1713 	return err;
1714 }
1715 
1716 u16 HPI_AESEBU__receiver_set_format(const struct hpi_hsubsys *ph_subsys,
1717 	u32 h_control, u16 format)
1718 {
1719 	return hpi_control_param_set(ph_subsys, h_control,
1720 		HPI_AESEBURX_FORMAT, format, 0);
1721 }
1722 
1723 u16 HPI_AESEBU__receiver_get_format(const struct hpi_hsubsys *ph_subsys,
1724 	u32 h_control, u16 *pw_format)
1725 {
1726 	u16 err;
1727 	u32 param;
1728 
1729 	err = hpi_control_param1_get(ph_subsys, h_control,
1730 		HPI_AESEBURX_FORMAT, &param);
1731 	if (!err && pw_format)
1732 		*pw_format = (u16)param;
1733 
1734 	return err;
1735 }
1736 
1737 u16 HPI_AESEBU__receiver_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
1738 	u32 h_control, u32 *psample_rate)
1739 {
1740 	return hpi_control_param1_get(ph_subsys, h_control,
1741 		HPI_AESEBURX_SAMPLERATE, psample_rate);
1742 }
1743 
1744 u16 HPI_AESEBU__receiver_get_user_data(const struct hpi_hsubsys *ph_subsys,
1745 	u32 h_control, u16 index, u16 *pw_data)
1746 {
1747 	struct hpi_message hm;
1748 	struct hpi_response hr;
1749 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1750 		HPI_CONTROL_GET_STATE);
1751 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1752 	hm.u.c.attribute = HPI_AESEBURX_USERDATA;
1753 	hm.u.c.param1 = index;
1754 
1755 	hpi_send_recv(&hm, &hr);
1756 
1757 	if (pw_data)
1758 		*pw_data = (u16)hr.u.c.param2;
1759 	return hr.error;
1760 }
1761 
1762 u16 HPI_AESEBU__receiver_get_channel_status(const struct hpi_hsubsys
1763 	*ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1764 {
1765 	struct hpi_message hm;
1766 	struct hpi_response hr;
1767 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1768 		HPI_CONTROL_GET_STATE);
1769 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1770 	hm.u.c.attribute = HPI_AESEBURX_CHANNELSTATUS;
1771 	hm.u.c.param1 = index;
1772 
1773 	hpi_send_recv(&hm, &hr);
1774 
1775 	if (pw_data)
1776 		*pw_data = (u16)hr.u.c.param2;
1777 	return hr.error;
1778 }
1779 
1780 u16 HPI_AESEBU__receiver_get_error_status(const struct hpi_hsubsys *ph_subsys,
1781 	u32 h_control, u16 *pw_error_data)
1782 {
1783 	u32 error_data = 0;
1784 	u16 error = 0;
1785 
1786 	error = hpi_control_param1_get(ph_subsys, h_control,
1787 		HPI_AESEBURX_ERRORSTATUS, &error_data);
1788 	if (pw_error_data)
1789 		*pw_error_data = (u16)error_data;
1790 	return error;
1791 }
1792 
1793 u16 HPI_AESEBU__transmitter_set_sample_rate(const struct hpi_hsubsys
1794 	*ph_subsys, u32 h_control, u32 sample_rate)
1795 {
1796 	return hpi_control_param_set(ph_subsys, h_control,
1797 		HPI_AESEBUTX_SAMPLERATE, sample_rate, 0);
1798 }
1799 
1800 u16 HPI_AESEBU__transmitter_set_user_data(const struct hpi_hsubsys *ph_subsys,
1801 	u32 h_control, u16 index, u16 data)
1802 {
1803 	return hpi_control_param_set(ph_subsys, h_control,
1804 		HPI_AESEBUTX_USERDATA, index, data);
1805 }
1806 
1807 u16 HPI_AESEBU__transmitter_set_channel_status(const struct hpi_hsubsys
1808 	*ph_subsys, u32 h_control, u16 index, u16 data)
1809 {
1810 	return hpi_control_param_set(ph_subsys, h_control,
1811 		HPI_AESEBUTX_CHANNELSTATUS, index, data);
1812 }
1813 
1814 u16 HPI_AESEBU__transmitter_get_channel_status(const struct hpi_hsubsys
1815 	*ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1816 {
1817 	return HPI_ERROR_INVALID_OPERATION;
1818 }
1819 
1820 u16 HPI_AESEBU__transmitter_query_format(const struct hpi_hsubsys *ph_subsys,
1821 	const u32 h_aes_tx, const u32 index, u16 *pw_format)
1822 {
1823 	u32 qr;
1824 	u16 err;
1825 
1826 	err = hpi_control_query(ph_subsys, h_aes_tx, HPI_AESEBUTX_FORMAT,
1827 		index, 0, &qr);
1828 	*pw_format = (u16)qr;
1829 	return err;
1830 }
1831 
1832 u16 HPI_AESEBU__transmitter_set_format(const struct hpi_hsubsys *ph_subsys,
1833 	u32 h_control, u16 output_format)
1834 {
1835 	return hpi_control_param_set(ph_subsys, h_control,
1836 		HPI_AESEBUTX_FORMAT, output_format, 0);
1837 }
1838 
1839 u16 HPI_AESEBU__transmitter_get_format(const struct hpi_hsubsys *ph_subsys,
1840 	u32 h_control, u16 *pw_output_format)
1841 {
1842 	u16 err;
1843 	u32 param;
1844 
1845 	err = hpi_control_param1_get(ph_subsys, h_control,
1846 		HPI_AESEBUTX_FORMAT, &param);
1847 	if (!err && pw_output_format)
1848 		*pw_output_format = (u16)param;
1849 
1850 	return err;
1851 }
1852 
1853 u16 hpi_bitstream_set_clock_edge(const struct hpi_hsubsys *ph_subsys,
1854 	u32 h_control, u16 edge_type)
1855 {
1856 	return hpi_control_param_set(ph_subsys, h_control,
1857 		HPI_BITSTREAM_CLOCK_EDGE, edge_type, 0);
1858 }
1859 
1860 u16 hpi_bitstream_set_data_polarity(const struct hpi_hsubsys *ph_subsys,
1861 	u32 h_control, u16 polarity)
1862 {
1863 	return hpi_control_param_set(ph_subsys, h_control,
1864 		HPI_BITSTREAM_DATA_POLARITY, polarity, 0);
1865 }
1866 
1867 u16 hpi_bitstream_get_activity(const struct hpi_hsubsys *ph_subsys,
1868 	u32 h_control, u16 *pw_clk_activity, u16 *pw_data_activity)
1869 {
1870 	struct hpi_message hm;
1871 	struct hpi_response hr;
1872 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1873 		HPI_CONTROL_GET_STATE);
1874 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1875 	hm.u.c.attribute = HPI_BITSTREAM_ACTIVITY;
1876 	hpi_send_recv(&hm, &hr);
1877 	if (pw_clk_activity)
1878 		*pw_clk_activity = (u16)hr.u.c.param1;
1879 	if (pw_data_activity)
1880 		*pw_data_activity = (u16)hr.u.c.param2;
1881 	return hr.error;
1882 }
1883 
1884 u16 hpi_channel_mode_query_mode(const struct hpi_hsubsys *ph_subsys,
1885 	const u32 h_mode, const u32 index, u16 *pw_mode)
1886 {
1887 	u32 qr;
1888 	u16 err;
1889 
1890 	err = hpi_control_query(ph_subsys, h_mode, HPI_CHANNEL_MODE_MODE,
1891 		index, 0, &qr);
1892 	*pw_mode = (u16)qr;
1893 	return err;
1894 }
1895 
1896 u16 hpi_channel_mode_set(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1897 	u16 mode)
1898 {
1899 	return hpi_control_param_set(ph_subsys, h_control,
1900 		HPI_CHANNEL_MODE_MODE, mode, 0);
1901 }
1902 
1903 u16 hpi_channel_mode_get(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1904 	u16 *mode)
1905 {
1906 	u32 mode32 = 0;
1907 	u16 error = hpi_control_param1_get(ph_subsys, h_control,
1908 		HPI_CHANNEL_MODE_MODE, &mode32);
1909 	if (mode)
1910 		*mode = (u16)mode32;
1911 	return error;
1912 }
1913 
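/*
 * Write to a CobraNet HMI variable. Payloads of up to 8 bytes are
 * copied inline into the message (HPI_COBRANET_SET); larger payloads
 * are passed to the adapter by reference (HPI_COBRANET_SET_DATA).
 */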
1914 u16 hpi_cobranet_hmi_write(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1915 	u32 hmi_address, u32 byte_count, u8 *pb_data)
1916 {
1917 	struct hpi_message hm;
1918 	struct hpi_response hr;
1919 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1920 		HPI_CONTROL_SET_STATE);
1921 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1922 
1923 	hm.u.cx.u.cobranet_data.byte_count = byte_count;
1924 	hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1925 
1926 	if (byte_count <= 8) {
1927 		memcpy(hm.u.cx.u.cobranet_data.data, pb_data, byte_count);
1928 		hm.u.cx.attribute = HPI_COBRANET_SET;
1929 	} else {
1930 		hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1931 		hm.u.cx.attribute = HPI_COBRANET_SET_DATA;
1932 	}
1933 
1934 	hpi_send_recv(&hm, &hr);
1935 
1936 	return hr.error;
1937 }
1938 
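/*
 * Read from a CobraNet HMI variable. Small reads (up to 8 bytes) come
 * back inline in the response (HPI_COBRANET_GET) and are copied out
 * here, limited to the caller's max_byte_count; for larger reads
 * (HPI_COBRANET_GET_DATA) the data is presumably written straight to
 * pb_data by the lower layers, so only the byte count is copied back.
 */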
1939 u16 hpi_cobranet_hmi_read(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1940 	u32 hmi_address, u32 max_byte_count, u32 *pbyte_count, u8 *pb_data)
1941 {
1942 	struct hpi_message hm;
1943 	struct hpi_response hr;
1944 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1945 		HPI_CONTROL_GET_STATE);
1946 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1947 
1948 	hm.u.cx.u.cobranet_data.byte_count = max_byte_count;
1949 	hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1950 
1951 	if (max_byte_count <= 8) {
1952 		hm.u.cx.attribute = HPI_COBRANET_GET;
1953 	} else {
1954 		hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1955 		hm.u.cx.attribute = HPI_COBRANET_GET_DATA;
1956 	}
1957 
1958 	hpi_send_recv(&hm, &hr);
1959 	if (!hr.error && pb_data) {
1960 
1961 		*pbyte_count = hr.u.cx.u.cobranet_data.byte_count;
1962 
1963 		if (*pbyte_count < max_byte_count)
1964 			max_byte_count = *pbyte_count;
1965 
		if (hm.u.cx.attribute == HPI_COBRANET_GET)
			memcpy(pb_data, hr.u.cx.u.cobranet_data.data,
				max_byte_count);
1972 
1973 	}
1974 	return hr.error;
1975 }
1976 
1977 u16 hpi_cobranet_hmi_get_status(const struct hpi_hsubsys *ph_subsys,
1978 	u32 h_control, u32 *pstatus, u32 *preadable_size,
1979 	u32 *pwriteable_size)
1980 {
1981 	struct hpi_message hm;
1982 	struct hpi_response hr;
1983 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1984 		HPI_CONTROL_GET_STATE);
1985 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1986 
1987 	hm.u.cx.attribute = HPI_COBRANET_GET_STATUS;
1988 
1989 	hpi_send_recv(&hm, &hr);
1990 	if (!hr.error) {
1991 		if (pstatus)
1992 			*pstatus = hr.u.cx.u.cobranet_status.status;
1993 		if (preadable_size)
1994 			*preadable_size =
1995 				hr.u.cx.u.cobranet_status.readable_size;
1996 		if (pwriteable_size)
1997 			*pwriteable_size =
1998 				hr.u.cx.u.cobranet_status.writeable_size;
1999 	}
2000 	return hr.error;
2001 }
2002 
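/*
 * The HMI appears to store the IP address with the two bytes of each
 * 16-bit word swapped relative to host order (inferred from the swap
 * below), so a raw value of 0xAABBCCDD becomes 0xBBAADDCC in
 * *pi_paddress. The same conversion is applied symmetrically by the
 * other IP address helpers below.
 */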
2003 u16 hpi_cobranet_getI_paddress(const struct hpi_hsubsys *ph_subsys,
2004 	u32 h_control, u32 *pi_paddress)
2005 {
2006 	u32 byte_count;
2007 	u32 iP;
2008 	u16 error;
2009 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2010 		HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, &byte_count,
2011 		(u8 *)&iP);
2012 
2013 	*pi_paddress =
2014 		((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2015 			0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2016 
2017 	if (error)
2018 		*pi_paddress = 0;
2019 
2020 	return error;
2021 
2022 }
2023 
2024 u16 hpi_cobranet_setI_paddress(const struct hpi_hsubsys *ph_subsys,
2025 	u32 h_control, u32 i_paddress)
2026 {
2027 	u32 iP;
2028 	u16 error;
2029 
2030 	iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2031 		8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2032 			0x000000ff) << 8);
2033 
2034 	error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2035 		HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, (u8 *)&iP);
2036 
2037 	return error;
2038 
2039 }
2040 
2041 u16 hpi_cobranet_get_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2042 	u32 h_control, u32 *pi_paddress)
2043 {
2044 	u32 byte_count;
2045 	u32 iP;
2046 	u16 error;
2047 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2048 		HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, &byte_count,
2049 		(u8 *)&iP);
2050 
2051 	*pi_paddress =
2052 		((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2053 			0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2054 
2055 	if (error)
2056 		*pi_paddress = 0;
2057 
2058 	return error;
2059 
2060 }
2061 
2062 u16 hpi_cobranet_set_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2063 	u32 h_control, u32 i_paddress)
2064 {
2065 	u32 iP;
2066 	u16 error;
2067 
2068 	iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2069 		8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2070 			0x000000ff) << 8);
2071 
2072 	error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2073 		HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, (u8 *)&iP);
2074 
2075 	return error;
2076 
2077 }
2078 
2079 u16 hpi_cobranet_getMA_caddress(const struct hpi_hsubsys *ph_subsys,
2080 	u32 h_control, u32 *pmAC_MS_bs, u32 *pmAC_LS_bs)
2081 {
2082 	u32 byte_count;
2083 	u16 error;
2084 	u32 mAC;
2085 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2086 		HPI_COBRANET_HMI_cobra_if_phy_address, 4, &byte_count,
2087 		(u8 *)&mAC);
2088 	*pmAC_MS_bs =
2089 		((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2090 			& 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2091 	error += hpi_cobranet_hmi_read(ph_subsys, h_control,
2092 		HPI_COBRANET_HMI_cobra_if_phy_address + 1, 4, &byte_count,
2093 		(u8 *)&mAC);
2094 	*pmAC_LS_bs =
2095 		((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2096 			& 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2097 
2098 	if (error) {
2099 		*pmAC_MS_bs = 0;
2100 		*pmAC_LS_bs = 0;
2101 	}
2102 
2103 	return error;
2104 }
2105 
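/*
 * Compander parameters are packed into a single control message:
 * attack time in the low 16 bits of param1, ratio (x100) in the high
 * 16 bits, decay time in param2, and the threshold and makeup gain
 * (both in 0.01 dB units) in an_log_value[0] and [1].
 */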
2106 u16 hpi_compander_set(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2107 	u16 attack, u16 decay, short ratio100, short threshold0_01dB,
2108 	short makeup_gain0_01dB)
2109 {
2110 	struct hpi_message hm;
2111 	struct hpi_response hr;
2112 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2113 		HPI_CONTROL_SET_STATE);
2114 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2115 
2116 	hm.u.c.param1 = attack + ((u32)ratio100 << 16);
2117 	hm.u.c.param2 = (decay & 0xFFFFL);
2118 	hm.u.c.an_log_value[0] = threshold0_01dB;
2119 	hm.u.c.an_log_value[1] = makeup_gain0_01dB;
2120 	hm.u.c.attribute = HPI_COMPANDER_PARAMS;
2121 
2122 	hpi_send_recv(&hm, &hr);
2123 
2124 	return hr.error;
2125 }
2126 
2127 u16 hpi_compander_get(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2128 	u16 *pw_attack, u16 *pw_decay, short *pw_ratio100,
2129 	short *pn_threshold0_01dB, short *pn_makeup_gain0_01dB)
2130 {
2131 	struct hpi_message hm;
2132 	struct hpi_response hr;
2133 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2134 		HPI_CONTROL_GET_STATE);
2135 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2136 	hm.u.c.attribute = HPI_COMPANDER_PARAMS;
2137 
2138 	hpi_send_recv(&hm, &hr);
2139 
2140 	if (pw_attack)
		*pw_attack = (u16)(hr.u.c.param1 & 0xFFFF);
	if (pw_decay)
		*pw_decay = (u16)(hr.u.c.param2 & 0xFFFF);
2144 	if (pw_ratio100)
2145 		*pw_ratio100 = (short)(hr.u.c.param1 >> 16);
2146 
2147 	if (pn_threshold0_01dB)
2148 		*pn_threshold0_01dB = hr.u.c.an_log_value[0];
2149 	if (pn_makeup_gain0_01dB)
2150 		*pn_makeup_gain0_01dB = hr.u.c.an_log_value[1];
2151 
2152 	return hr.error;
2153 }
2154 
2155 u16 hpi_level_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2156 	short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
2157 {
2158 	struct hpi_message hm;
2159 	struct hpi_response hr;
2160 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2161 		HPI_CONTROL_GET_STATE);
2162 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2163 	hm.u.c.attribute = HPI_LEVEL_RANGE;
2164 
2165 	hpi_send_recv(&hm, &hr);
2166 	if (hr.error) {
2167 		hr.u.c.an_log_value[0] = 0;
2168 		hr.u.c.an_log_value[1] = 0;
2169 		hr.u.c.param1 = 0;
2170 	}
2171 	if (min_gain_01dB)
2172 		*min_gain_01dB = hr.u.c.an_log_value[0];
2173 	if (max_gain_01dB)
2174 		*max_gain_01dB = hr.u.c.an_log_value[1];
2175 	if (step_gain_01dB)
2176 		*step_gain_01dB = (short)hr.u.c.param1;
2177 	return hr.error;
2178 }
2179 
2180 u16 hpi_level_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2181 	short an_gain0_01dB[HPI_MAX_CHANNELS]
2182 	)
2183 {
2184 	struct hpi_message hm;
2185 	struct hpi_response hr;
2186 
2187 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2188 		HPI_CONTROL_SET_STATE);
2189 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2190 	memcpy(hm.u.c.an_log_value, an_gain0_01dB,
2191 		sizeof(short) * HPI_MAX_CHANNELS);
2192 	hm.u.c.attribute = HPI_LEVEL_GAIN;
2193 
2194 	hpi_send_recv(&hm, &hr);
2195 
2196 	return hr.error;
2197 }
2198 
2199 u16 hpi_level_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2200 	short an_gain0_01dB[HPI_MAX_CHANNELS]
2201 	)
2202 {
2203 	struct hpi_message hm;
2204 	struct hpi_response hr;
2205 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2206 		HPI_CONTROL_GET_STATE);
2207 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2208 	hm.u.c.attribute = HPI_LEVEL_GAIN;
2209 
2210 	hpi_send_recv(&hm, &hr);
2211 
2212 	memcpy(an_gain0_01dB, hr.u.c.an_log_value,
2213 		sizeof(short) * HPI_MAX_CHANNELS);
2214 	return hr.error;
2215 }
2216 
2217 u16 hpi_meter_query_channels(const struct hpi_hsubsys *ph_subsys,
2218 	const u32 h_meter, u32 *p_channels)
2219 {
2220 	return hpi_control_query(ph_subsys, h_meter, HPI_METER_NUM_CHANNELS,
2221 		0, 0, p_channels);
2222 }
2223 
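/*
 * If the peak (or RMS, below) read fails, every channel is reported
 * as HPI_METER_MINIMUM rather than leaving the caller's array
 * uninitialised.
 */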
2224 u16 hpi_meter_get_peak(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2225 	short an_peakdB[HPI_MAX_CHANNELS]
2226 	)
2227 {
2228 	short i = 0;
2229 
2230 	struct hpi_message hm;
2231 	struct hpi_response hr;
2232 
2233 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2234 		HPI_CONTROL_GET_STATE);
2235 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2237 	hm.u.c.attribute = HPI_METER_PEAK;
2238 
2239 	hpi_send_recv(&hm, &hr);
2240 
2241 	if (!hr.error)
2242 		memcpy(an_peakdB, hr.u.c.an_log_value,
2243 			sizeof(short) * HPI_MAX_CHANNELS);
2244 	else
2245 		for (i = 0; i < HPI_MAX_CHANNELS; i++)
2246 			an_peakdB[i] = HPI_METER_MINIMUM;
2247 	return hr.error;
2248 }
2249 
2250 u16 hpi_meter_get_rms(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2251 	short an_rmsdB[HPI_MAX_CHANNELS]
2252 	)
2253 {
2254 	short i = 0;
2255 
2256 	struct hpi_message hm;
2257 	struct hpi_response hr;
2258 
2259 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2260 		HPI_CONTROL_GET_STATE);
2261 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2262 	hm.u.c.attribute = HPI_METER_RMS;
2263 
2264 	hpi_send_recv(&hm, &hr);
2265 
2266 	if (!hr.error)
2267 		memcpy(an_rmsdB, hr.u.c.an_log_value,
2268 			sizeof(short) * HPI_MAX_CHANNELS);
2269 	else
2270 		for (i = 0; i < HPI_MAX_CHANNELS; i++)
2271 			an_rmsdB[i] = HPI_METER_MINIMUM;
2272 
2273 	return hr.error;
2274 }
2275 
2276 u16 hpi_meter_set_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2277 	u32 h_control, u16 attack, u16 decay)
2278 {
2279 	return hpi_control_param_set(ph_subsys, h_control,
2280 		HPI_METER_RMS_BALLISTICS, attack, decay);
2281 }
2282 
2283 u16 hpi_meter_get_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2284 	u32 h_control, u16 *pn_attack, u16 *pn_decay)
2285 {
2286 	u32 attack;
2287 	u32 decay;
2288 	u16 error;
2289 
2290 	error = hpi_control_param2_get(ph_subsys, h_control,
2291 		HPI_METER_RMS_BALLISTICS, &attack, &decay);
2292 
2293 	if (pn_attack)
		*pn_attack = (u16)attack;
	if (pn_decay)
		*pn_decay = (u16)decay;
2297 
2298 	return error;
2299 }
2300 
2301 u16 hpi_meter_set_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2302 	u32 h_control, u16 attack, u16 decay)
2303 {
2304 	return hpi_control_param_set(ph_subsys, h_control,
2305 		HPI_METER_PEAK_BALLISTICS, attack, decay);
2306 }
2307 
2308 u16 hpi_meter_get_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2309 	u32 h_control, u16 *pn_attack, u16 *pn_decay)
2310 {
2311 	u32 attack;
2312 	u32 decay;
2313 	u16 error;
2314 
2315 	error = hpi_control_param2_get(ph_subsys, h_control,
2316 		HPI_METER_PEAK_BALLISTICS, &attack, &decay);
2317 
2318 	if (pn_attack)
		*pn_attack = (u16)attack;
	if (pn_decay)
		*pn_decay = (u16)decay;
2322 
2323 	return error;
2324 }
2325 
2326 u16 hpi_microphone_set_phantom_power(const struct hpi_hsubsys *ph_subsys,
2327 	u32 h_control, u16 on_off)
2328 {
2329 	return hpi_control_param_set(ph_subsys, h_control,
2330 		HPI_MICROPHONE_PHANTOM_POWER, (u32)on_off, 0);
2331 }
2332 
2333 u16 hpi_microphone_get_phantom_power(const struct hpi_hsubsys *ph_subsys,
2334 	u32 h_control, u16 *pw_on_off)
2335 {
2336 	u16 error = 0;
2337 	u32 on_off = 0;
2338 	error = hpi_control_param1_get(ph_subsys, h_control,
2339 		HPI_MICROPHONE_PHANTOM_POWER, &on_off);
2340 	if (pw_on_off)
2341 		*pw_on_off = (u16)on_off;
2342 	return error;
2343 }
2344 
2345 u16 hpi_multiplexer_set_source(const struct hpi_hsubsys *ph_subsys,
2346 	u32 h_control, u16 source_node_type, u16 source_node_index)
2347 {
2348 	return hpi_control_param_set(ph_subsys, h_control,
2349 		HPI_MULTIPLEXER_SOURCE, source_node_type, source_node_index);
2350 }
2351 
2352 u16 hpi_multiplexer_get_source(const struct hpi_hsubsys *ph_subsys,
2353 	u32 h_control, u16 *source_node_type, u16 *source_node_index)
2354 {
2355 	u32 node, index;
2356 	u16 error = hpi_control_param2_get(ph_subsys, h_control,
2357 		HPI_MULTIPLEXER_SOURCE, &node,
2358 		&index);
2359 	if (source_node_type)
2360 		*source_node_type = (u16)node;
2361 	if (source_node_index)
2362 		*source_node_index = (u16)index;
2363 	return error;
2364 }
2365 
2366 u16 hpi_multiplexer_query_source(const struct hpi_hsubsys *ph_subsys,
2367 	u32 h_control, u16 index, u16 *source_node_type,
2368 	u16 *source_node_index)
2369 {
2370 	struct hpi_message hm;
2371 	struct hpi_response hr;
2372 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2373 		HPI_CONTROL_GET_STATE);
2374 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2375 	hm.u.c.attribute = HPI_MULTIPLEXER_QUERYSOURCE;
2376 	hm.u.c.param1 = index;
2377 
2378 	hpi_send_recv(&hm, &hr);
2379 
2380 	if (source_node_type)
2381 		*source_node_type = (u16)hr.u.c.param1;
2382 	if (source_node_index)
2383 		*source_node_index = (u16)hr.u.c.param2;
2384 	return hr.error;
2385 }
2386 
2387 u16 hpi_parametricEQ__get_info(const struct hpi_hsubsys *ph_subsys,
2388 	u32 h_control, u16 *pw_number_of_bands, u16 *pw_on_off)
2389 {
2390 	u32 oB = 0;
2391 	u32 oO = 0;
2392 	u16 error = 0;
2393 
2394 	error = hpi_control_param2_get(ph_subsys, h_control,
2395 		HPI_EQUALIZER_NUM_FILTERS, &oO, &oB);
2396 	if (pw_number_of_bands)
2397 		*pw_number_of_bands = (u16)oB;
2398 	if (pw_on_off)
2399 		*pw_on_off = (u16)oO;
2400 	return error;
2401 }
2402 
2403 u16 hpi_parametricEQ__set_state(const struct hpi_hsubsys *ph_subsys,
2404 	u32 h_control, u16 on_off)
2405 {
2406 	return hpi_control_param_set(ph_subsys, h_control,
2407 		HPI_EQUALIZER_NUM_FILTERS, on_off, 0);
2408 }
2409 
2410 u16 hpi_parametricEQ__get_band(const struct hpi_hsubsys *ph_subsys,
2411 	u32 h_control, u16 index, u16 *pn_type, u32 *pfrequency_hz,
2412 	short *pnQ100, short *pn_gain0_01dB)
2413 {
2414 	struct hpi_message hm;
2415 	struct hpi_response hr;
2416 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2417 		HPI_CONTROL_GET_STATE);
2418 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2419 	hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2420 	hm.u.c.param2 = index;
2421 
2422 	hpi_send_recv(&hm, &hr);
2423 
2424 	if (pfrequency_hz)
2425 		*pfrequency_hz = hr.u.c.param1;
2426 	if (pn_type)
2427 		*pn_type = (u16)(hr.u.c.param2 >> 16);
2428 	if (pnQ100)
2429 		*pnQ100 = hr.u.c.an_log_value[1];
2430 	if (pn_gain0_01dB)
2431 		*pn_gain0_01dB = hr.u.c.an_log_value[0];
2432 
2433 	return hr.error;
2434 }
2435 
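/*
 * A parametric EQ band is programmed with the frequency in Hz in
 * param1, the band index in the low 16 bits and the filter type in
 * the high 16 bits of param2, and the gain (0.01 dB) and Q (x100) in
 * an_log_value[0] and [1].
 */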
2436 u16 hpi_parametricEQ__set_band(const struct hpi_hsubsys *ph_subsys,
2437 	u32 h_control, u16 index, u16 type, u32 frequency_hz, short q100,
2438 	short gain0_01dB)
2439 {
2440 	struct hpi_message hm;
2441 	struct hpi_response hr;
2442 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2443 		HPI_CONTROL_SET_STATE);
2444 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2445 
2446 	hm.u.c.param1 = frequency_hz;
2447 	hm.u.c.param2 = (index & 0xFFFFL) + ((u32)type << 16);
2448 	hm.u.c.an_log_value[0] = gain0_01dB;
2449 	hm.u.c.an_log_value[1] = q100;
2450 	hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2451 
2452 	hpi_send_recv(&hm, &hr);
2453 
2454 	return hr.error;
2455 }
2456 
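/*
 * The five filter coefficients for a band are unpacked from the
 * response as follows: coeffs[0..1] from an_log_value[0..1], coeffs[2]
 * and coeffs[3] from the low and high halves of param1, and coeffs[4]
 * from param2.
 */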
2457 u16 hpi_parametricEQ__get_coeffs(const struct hpi_hsubsys *ph_subsys,
2458 	u32 h_control, u16 index, short coeffs[5]
2459 	)
2460 {
2461 	struct hpi_message hm;
2462 	struct hpi_response hr;
2463 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2464 		HPI_CONTROL_GET_STATE);
2465 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2466 	hm.u.c.attribute = HPI_EQUALIZER_COEFFICIENTS;
2467 	hm.u.c.param2 = index;
2468 
2469 	hpi_send_recv(&hm, &hr);
2470 
2471 	coeffs[0] = (short)hr.u.c.an_log_value[0];
2472 	coeffs[1] = (short)hr.u.c.an_log_value[1];
2473 	coeffs[2] = (short)hr.u.c.param1;
2474 	coeffs[3] = (short)(hr.u.c.param1 >> 16);
2475 	coeffs[4] = (short)hr.u.c.param2;
2476 
2477 	return hr.error;
2478 }
2479 
2480 u16 hpi_sample_clock_query_source(const struct hpi_hsubsys *ph_subsys,
2481 	const u32 h_clock, const u32 index, u16 *pw_source)
2482 {
2483 	u32 qr;
2484 	u16 err;
2485 
2486 	err = hpi_control_query(ph_subsys, h_clock, HPI_SAMPLECLOCK_SOURCE,
2487 		index, 0, &qr);
2488 	*pw_source = (u16)qr;
2489 	return err;
2490 }
2491 
2492 u16 hpi_sample_clock_set_source(const struct hpi_hsubsys *ph_subsys,
2493 	u32 h_control, u16 source)
2494 {
2495 	return hpi_control_param_set(ph_subsys, h_control,
2496 		HPI_SAMPLECLOCK_SOURCE, source, 0);
2497 }
2498 
2499 u16 hpi_sample_clock_get_source(const struct hpi_hsubsys *ph_subsys,
2500 	u32 h_control, u16 *pw_source)
2501 {
2502 	u16 error = 0;
2503 	u32 source = 0;
2504 	error = hpi_control_param1_get(ph_subsys, h_control,
2505 		HPI_SAMPLECLOCK_SOURCE, &source);
2506 	if (!error)
2507 		if (pw_source)
2508 			*pw_source = (u16)source;
2509 	return error;
2510 }
2511 
2512 u16 hpi_sample_clock_query_source_index(const struct hpi_hsubsys *ph_subsys,
2513 	const u32 h_clock, const u32 index, const u32 source,
2514 	u16 *pw_source_index)
2515 {
2516 	u32 qr;
2517 	u16 err;
2518 
2519 	err = hpi_control_query(ph_subsys, h_clock,
2520 		HPI_SAMPLECLOCK_SOURCE_INDEX, index, source, &qr);
2521 	*pw_source_index = (u16)qr;
2522 	return err;
2523 }
2524 
2525 u16 hpi_sample_clock_set_source_index(const struct hpi_hsubsys *ph_subsys,
2526 	u32 h_control, u16 source_index)
2527 {
2528 	return hpi_control_param_set(ph_subsys, h_control,
2529 		HPI_SAMPLECLOCK_SOURCE_INDEX, source_index, 0);
2530 }
2531 
2532 u16 hpi_sample_clock_get_source_index(const struct hpi_hsubsys *ph_subsys,
2533 	u32 h_control, u16 *pw_source_index)
2534 {
2535 	u16 error = 0;
2536 	u32 source_index = 0;
2537 	error = hpi_control_param1_get(ph_subsys, h_control,
2538 		HPI_SAMPLECLOCK_SOURCE_INDEX, &source_index);
2539 	if (!error)
2540 		if (pw_source_index)
2541 			*pw_source_index = (u16)source_index;
2542 	return error;
2543 }
2544 
2545 u16 hpi_sample_clock_query_local_rate(const struct hpi_hsubsys *ph_subsys,
2546 	const u32 h_clock, const u32 index, u32 *prate)
2547 {
2548 	u16 err;
2549 	err = hpi_control_query(ph_subsys, h_clock,
2550 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, index, 0, prate);
2551 
2552 	return err;
2553 }
2554 
2555 u16 hpi_sample_clock_set_local_rate(const struct hpi_hsubsys *ph_subsys,
2556 	u32 h_control, u32 sample_rate)
2557 {
2558 	return hpi_control_param_set(ph_subsys, h_control,
2559 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, sample_rate, 0);
2560 }
2561 
2562 u16 hpi_sample_clock_get_local_rate(const struct hpi_hsubsys *ph_subsys,
2563 	u32 h_control, u32 *psample_rate)
2564 {
2565 	u16 error = 0;
2566 	u32 sample_rate = 0;
2567 	error = hpi_control_param1_get(ph_subsys, h_control,
2568 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, &sample_rate);
2569 	if (!error)
2570 		if (psample_rate)
2571 			*psample_rate = sample_rate;
2572 	return error;
2573 }
2574 
2575 u16 hpi_sample_clock_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
2576 	u32 h_control, u32 *psample_rate)
2577 {
2578 	u16 error = 0;
2579 	u32 sample_rate = 0;
2580 	error = hpi_control_param1_get(ph_subsys, h_control,
2581 		HPI_SAMPLECLOCK_SAMPLERATE, &sample_rate);
2582 	if (!error)
2583 		if (psample_rate)
2584 			*psample_rate = sample_rate;
2585 	return error;
2586 }
2587 
2588 u16 hpi_sample_clock_set_auto(const struct hpi_hsubsys *ph_subsys,
2589 	u32 h_control, u32 enable)
2590 {
2591 	return hpi_control_param_set(ph_subsys, h_control,
2592 		HPI_SAMPLECLOCK_AUTO, enable, 0);
2593 }
2594 
2595 u16 hpi_sample_clock_get_auto(const struct hpi_hsubsys *ph_subsys,
2596 	u32 h_control, u32 *penable)
2597 {
2598 	return hpi_control_param1_get(ph_subsys, h_control,
2599 		HPI_SAMPLECLOCK_AUTO, penable);
2600 }
2601 
2602 u16 hpi_sample_clock_set_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2603 	u32 h_control, u32 lock)
2604 {
2605 	return hpi_control_param_set(ph_subsys, h_control,
2606 		HPI_SAMPLECLOCK_LOCAL_LOCK, lock, 0);
2607 }
2608 
2609 u16 hpi_sample_clock_get_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2610 	u32 h_control, u32 *plock)
2611 {
2612 	return hpi_control_param1_get(ph_subsys, h_control,
2613 		HPI_SAMPLECLOCK_LOCAL_LOCK, plock);
2614 }
2615 
2616 u16 hpi_tone_detector_get_frequency(const struct hpi_hsubsys *ph_subsys,
2617 	u32 h_control, u32 index, u32 *frequency)
2618 {
2619 	return hpi_control_param_get(ph_subsys, h_control,
2620 		HPI_TONEDETECTOR_FREQUENCY, index, 0, frequency, NULL);
2621 }
2622 
2623 u16 hpi_tone_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2624 	u32 h_control, u32 *state)
2625 {
2626 	return hpi_control_param_get(ph_subsys, h_control,
2627 		HPI_TONEDETECTOR_STATE, 0, 0, (u32 *)state, NULL);
2628 }
2629 
2630 u16 hpi_tone_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2631 	u32 h_control, u32 enable)
2632 {
2633 	return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2634 		(u32)enable, 0);
2635 }
2636 
2637 u16 hpi_tone_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2638 	u32 h_control, u32 *enable)
2639 {
2640 	return hpi_control_param_get(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2641 		0, 0, (u32 *)enable, NULL);
2642 }
2643 
2644 u16 hpi_tone_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2645 	u32 h_control, u32 event_enable)
2646 {
2647 	return hpi_control_param_set(ph_subsys, h_control,
2648 		HPI_GENERIC_EVENT_ENABLE, (u32)event_enable, 0);
2649 }
2650 
2651 u16 hpi_tone_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2652 	u32 h_control, u32 *event_enable)
2653 {
2654 	return hpi_control_param_get(ph_subsys, h_control,
2655 		HPI_GENERIC_EVENT_ENABLE, 0, 0, (u32 *)event_enable, NULL);
2656 }
2657 
2658 u16 hpi_tone_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2659 	u32 h_control, int threshold)
2660 {
2661 	return hpi_control_param_set(ph_subsys, h_control,
2662 		HPI_TONEDETECTOR_THRESHOLD, (u32)threshold, 0);
2663 }
2664 
2665 u16 hpi_tone_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2666 	u32 h_control, int *threshold)
2667 {
2668 	return hpi_control_param_get(ph_subsys, h_control,
2669 		HPI_TONEDETECTOR_THRESHOLD, 0, 0, (u32 *)threshold, NULL);
2670 }
2671 
2672 u16 hpi_silence_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2673 	u32 h_control, u32 *state)
2674 {
2675 	return hpi_control_param_get(ph_subsys, h_control,
2676 		HPI_SILENCEDETECTOR_STATE, 0, 0, (u32 *)state, NULL);
2677 }
2678 
2679 u16 hpi_silence_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2680 	u32 h_control, u32 enable)
2681 {
2682 	return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2683 		(u32)enable, 0);
2684 }
2685 
2686 u16 hpi_silence_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2687 	u32 h_control, u32 *enable)
2688 {
2689 	return hpi_control_param_get(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2690 		0, 0, (u32 *)enable, NULL);
2691 }
2692 
2693 u16 hpi_silence_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2694 	u32 h_control, u32 event_enable)
2695 {
2696 	return hpi_control_param_set(ph_subsys, h_control,
2697 		HPI_GENERIC_EVENT_ENABLE, (u32)event_enable, 0);
2698 }
2699 
2700 u16 hpi_silence_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2701 	u32 h_control, u32 *event_enable)
2702 {
2703 	return hpi_control_param_get(ph_subsys, h_control,
2704 		HPI_GENERIC_EVENT_ENABLE, 0, 0, (u32 *)event_enable, NULL);
2705 }
2706 
2707 u16 hpi_silence_detector_set_delay(const struct hpi_hsubsys *ph_subsys,
2708 	u32 h_control, u32 delay)
2709 {
2710 	return hpi_control_param_set(ph_subsys, h_control,
2711 		HPI_SILENCEDETECTOR_DELAY, (u32)delay, 0);
2712 }
2713 
2714 u16 hpi_silence_detector_get_delay(const struct hpi_hsubsys *ph_subsys,
2715 	u32 h_control, u32 *delay)
2716 {
2717 	return hpi_control_param_get(ph_subsys, h_control,
2718 		HPI_SILENCEDETECTOR_DELAY, 0, 0, (u32 *)delay, NULL);
2719 }
2720 
2721 u16 hpi_silence_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2722 	u32 h_control, int threshold)
2723 {
2724 	return hpi_control_param_set(ph_subsys, h_control,
2725 		HPI_SILENCEDETECTOR_THRESHOLD, (u32)threshold, 0);
2726 }
2727 
2728 u16 hpi_silence_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2729 	u32 h_control, int *threshold)
2730 {
2731 	return hpi_control_param_get(ph_subsys, h_control,
2732 		HPI_SILENCEDETECTOR_THRESHOLD, 0, 0, (u32 *)threshold, NULL);
2733 }
2734 
2735 u16 hpi_tuner_query_band(const struct hpi_hsubsys *ph_subsys,
2736 	const u32 h_tuner, const u32 index, u16 *pw_band)
2737 {
2738 	u32 qr;
2739 	u16 err;
2740 
2741 	err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_BAND, index, 0,
2742 		&qr);
2743 	*pw_band = (u16)qr;
2744 	return err;
2745 }
2746 
2747 u16 hpi_tuner_set_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2748 	u16 band)
2749 {
2750 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_BAND,
2751 		band, 0);
2752 }
2753 
2754 u16 hpi_tuner_get_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2755 	u16 *pw_band)
2756 {
2757 	u32 band = 0;
2758 	u16 error = 0;
2759 
2760 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_BAND,
2761 		&band);
2762 	if (pw_band)
2763 		*pw_band = (u16)band;
2764 	return error;
2765 }
2766 
2767 u16 hpi_tuner_query_frequency(const struct hpi_hsubsys *ph_subsys,
2768 	const u32 h_tuner, const u32 index, const u16 band, u32 *pfreq)
2769 {
2770 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_FREQ, index,
2771 		band, pfreq);
2772 }
2773 
2774 u16 hpi_tuner_set_frequency(const struct hpi_hsubsys *ph_subsys,
2775 	u32 h_control, u32 freq_ink_hz)
2776 {
2777 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_FREQ,
2778 		freq_ink_hz, 0);
2779 }
2780 
2781 u16 hpi_tuner_get_frequency(const struct hpi_hsubsys *ph_subsys,
2782 	u32 h_control, u32 *pw_freq_ink_hz)
2783 {
2784 	return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_FREQ,
2785 		pw_freq_ink_hz);
2786 }
2787 
2788 u16 hpi_tuner_query_gain(const struct hpi_hsubsys *ph_subsys,
2789 	const u32 h_tuner, const u32 index, u16 *pw_gain)
2790 {
2791 	u32 qr;
2792 	u16 err;
2793 
	err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_GAIN, index, 0,
2795 		&qr);
2796 	*pw_gain = (u16)qr;
2797 	return err;
2798 }
2799 
2800 u16 hpi_tuner_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2801 	short gain)
2802 {
2803 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_GAIN,
2804 		gain, 0);
2805 }
2806 
2807 u16 hpi_tuner_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2808 	short *pn_gain)
2809 {
2810 	u32 gain = 0;
2811 	u16 error = 0;
2812 
2813 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_GAIN,
2814 		&gain);
2815 	if (pn_gain)
		*pn_gain = (short)gain;
2817 	return error;
2818 }
2819 
2820 u16 hpi_tuner_getRF_level(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2821 	short *pw_level)
2822 {
2823 	struct hpi_message hm;
2824 	struct hpi_response hr;
2825 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2826 		HPI_CONTROL_GET_STATE);
2827 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2828 	hm.u.c.attribute = HPI_TUNER_LEVEL;
2829 	hm.u.c.param1 = HPI_TUNER_LEVEL_AVERAGE;
2830 	hpi_send_recv(&hm, &hr);
2831 	if (pw_level)
2832 		*pw_level = (short)hr.u.c.param1;
2833 	return hr.error;
2834 }
2835 
2836 u16 hpi_tuner_get_rawRF_level(const struct hpi_hsubsys *ph_subsys,
2837 	u32 h_control, short *pw_level)
2838 {
2839 	struct hpi_message hm;
2840 	struct hpi_response hr;
2841 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2842 		HPI_CONTROL_GET_STATE);
2843 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2844 	hm.u.c.attribute = HPI_TUNER_LEVEL;
2845 	hm.u.c.param1 = HPI_TUNER_LEVEL_RAW;
2846 	hpi_send_recv(&hm, &hr);
2847 	if (pw_level)
2848 		*pw_level = (short)hr.u.c.param1;
2849 	return hr.error;
2850 }
2851 
2852 u16 hpi_tuner_query_deemphasis(const struct hpi_hsubsys *ph_subsys,
2853 	const u32 h_tuner, const u32 index, const u16 band, u32 *pdeemphasis)
2854 {
2855 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_DEEMPHASIS,
2856 		index, band, pdeemphasis);
2857 }
2858 
2859 u16 hpi_tuner_set_deemphasis(const struct hpi_hsubsys *ph_subsys,
2860 	u32 h_control, u32 deemphasis)
2861 {
2862 	return hpi_control_param_set(ph_subsys, h_control,
2863 		HPI_TUNER_DEEMPHASIS, deemphasis, 0);
2864 }
2865 
2866 u16 hpi_tuner_get_deemphasis(const struct hpi_hsubsys *ph_subsys,
2867 	u32 h_control, u32 *pdeemphasis)
2868 {
2869 	return hpi_control_param1_get(ph_subsys, h_control,
2870 		HPI_TUNER_DEEMPHASIS, pdeemphasis);
2871 }
2872 
2873 u16 hpi_tuner_query_program(const struct hpi_hsubsys *ph_subsys,
2874 	const u32 h_tuner, u32 *pbitmap_program)
2875 {
2876 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_PROGRAM, 0, 0,
2877 		pbitmap_program);
2878 }
2879 
2880 u16 hpi_tuner_set_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2881 	u32 program)
2882 {
2883 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2884 		program, 0);
2885 }
2886 
2887 u16 hpi_tuner_get_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2888 	u32 *pprogram)
2889 {
2890 	return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2891 		pprogram);
2892 }
2893 
2894 u16 hpi_tuner_get_hd_radio_dsp_version(const struct hpi_hsubsys *ph_subsys,
2895 	u32 h_control, char *psz_dsp_version, const u32 string_size)
2896 {
2897 	return hpi_control_get_string(ph_subsys, h_control,
2898 		HPI_TUNER_HDRADIO_DSP_VERSION, psz_dsp_version, string_size);
2899 }
2900 
2901 u16 hpi_tuner_get_hd_radio_sdk_version(const struct hpi_hsubsys *ph_subsys,
2902 	u32 h_control, char *psz_sdk_version, const u32 string_size)
2903 {
2904 	return hpi_control_get_string(ph_subsys, h_control,
2905 		HPI_TUNER_HDRADIO_SDK_VERSION, psz_sdk_version, string_size);
2906 }
2907 
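/*
 * The 32-bit tuner status packs a status mask in the upper 16 bits
 * and the status flags in the lower 16 bits. Note that pw_status_mask
 * is only guarded by the pw_status check below, so callers passing a
 * non-NULL pw_status must also pass a non-NULL pw_status_mask.
 */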
2908 u16 hpi_tuner_get_status(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2909 	u16 *pw_status_mask, u16 *pw_status)
2910 {
2911 	u32 status = 0;
2912 	u16 error = 0;
2913 
2914 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_STATUS,
2915 		&status);
2916 	if (pw_status) {
2917 		if (!error) {
2918 			*pw_status_mask = (u16)(status >> 16);
2919 			*pw_status = (u16)(status & 0xFFFF);
2920 		} else {
2921 			*pw_status_mask = 0;
2922 			*pw_status = 0;
2923 		}
2924 	}
2925 	return error;
2926 }
2927 
2928 u16 hpi_tuner_set_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2929 	u32 mode, u32 value)
2930 {
2931 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_MODE,
2932 		mode, value);
2933 }
2934 
2935 u16 hpi_tuner_get_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2936 	u32 mode, u32 *pn_value)
2937 {
2938 	return hpi_control_param_get(ph_subsys, h_control, HPI_TUNER_MODE,
2939 		mode, 0, pn_value, NULL);
2940 }
2941 
2942 u16 hpi_tuner_get_hd_radio_signal_quality(const struct hpi_hsubsys *ph_subsys,
2943 	u32 h_control, u32 *pquality)
2944 {
2945 	return hpi_control_param_get(ph_subsys, h_control,
2946 		HPI_TUNER_HDRADIO_SIGNAL_QUALITY, 0, 0, pquality, NULL);
2947 }
2948 
2949 u16 hpi_tuner_get_hd_radio_signal_blend(const struct hpi_hsubsys *ph_subsys,
2950 	u32 h_control, u32 *pblend)
2951 {
2952 	return hpi_control_param_get(ph_subsys, h_control,
2953 		HPI_TUNER_HDRADIO_BLEND, 0, 0, pblend, NULL);
2954 }
2955 
2956 u16 hpi_tuner_set_hd_radio_signal_blend(const struct hpi_hsubsys *ph_subsys,
2957 	u32 h_control, const u32 blend)
2958 {
2959 	return hpi_control_param_set(ph_subsys, h_control,
2960 		HPI_TUNER_HDRADIO_BLEND, blend, 0);
2961 }
2962 
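/*
 * Copies two 32-bit words of RDS data plus a 32-bit block error rate
 * into p_data, so the caller's buffer must be at least 12 bytes long.
 */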
2963 u16 hpi_tuner_getRDS(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2964 	char *p_data)
2965 {
2966 	struct hpi_message hm;
2967 	struct hpi_response hr;
2968 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2969 		HPI_CONTROL_GET_STATE);
2970 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2971 	hm.u.c.attribute = HPI_TUNER_RDS;
2972 	hpi_send_recv(&hm, &hr);
2973 	if (p_data) {
2974 		*(u32 *)&p_data[0] = hr.u.cu.tuner.rds.data[0];
2975 		*(u32 *)&p_data[4] = hr.u.cu.tuner.rds.data[1];
2976 		*(u32 *)&p_data[8] = hr.u.cu.tuner.rds.bLER;
2977 	}
2978 	return hr.error;
2979 }
2980 
2981 u16 HPI_PAD__get_channel_name(const struct hpi_hsubsys *ph_subsys,
2982 	u32 h_control, char *psz_string, const u32 data_length)
2983 {
2984 	return hpi_control_get_string(ph_subsys, h_control,
2985 		HPI_PAD_CHANNEL_NAME, psz_string, data_length);
2986 }
2987 
2988 u16 HPI_PAD__get_artist(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2989 	char *psz_string, const u32 data_length)
2990 {
2991 	return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_ARTIST,
2992 		psz_string, data_length);
2993 }
2994 
2995 u16 HPI_PAD__get_title(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2996 	char *psz_string, const u32 data_length)
2997 {
2998 	return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_TITLE,
2999 		psz_string, data_length);
3000 }
3001 
3002 u16 HPI_PAD__get_comment(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3003 	char *psz_string, const u32 data_length)
3004 {
3005 	return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_COMMENT,
3006 		psz_string, data_length);
3007 }
3008 
3009 u16 HPI_PAD__get_program_type(const struct hpi_hsubsys *ph_subsys,
3010 	u32 h_control, u32 *ppTY)
3011 {
3012 	return hpi_control_param_get(ph_subsys, h_control,
3013 		HPI_PAD_PROGRAM_TYPE, 0, 0, ppTY, NULL);
3014 }
3015 
3016 u16 HPI_PAD__get_rdsPI(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3017 	u32 *ppI)
3018 {
3019 	return hpi_control_param_get(ph_subsys, h_control, HPI_PAD_PROGRAM_ID,
3020 		0, 0, ppI, NULL);
3021 }
3022 
3023 u16 hpi_volume_query_channels(const struct hpi_hsubsys *ph_subsys,
3024 	const u32 h_volume, u32 *p_channels)
3025 {
3026 	return hpi_control_query(ph_subsys, h_volume, HPI_VOLUME_NUM_CHANNELS,
3027 		0, 0, p_channels);
3028 }
3029 
3030 u16 hpi_volume_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3031 	short an_log_gain[HPI_MAX_CHANNELS]
3032 	)
3033 {
3034 	struct hpi_message hm;
3035 	struct hpi_response hr;
3036 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3037 		HPI_CONTROL_SET_STATE);
3038 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3039 	memcpy(hm.u.c.an_log_value, an_log_gain,
3040 		sizeof(short) * HPI_MAX_CHANNELS);
3041 	hm.u.c.attribute = HPI_VOLUME_GAIN;
3042 
3043 	hpi_send_recv(&hm, &hr);
3044 
3045 	return hr.error;
3046 }
3047 
3048 u16 hpi_volume_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3049 	short an_log_gain[HPI_MAX_CHANNELS]
3050 	)
3051 {
3052 	struct hpi_message hm;
3053 	struct hpi_response hr;
3054 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3055 		HPI_CONTROL_GET_STATE);
3056 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3057 	hm.u.c.attribute = HPI_VOLUME_GAIN;
3058 
3059 	hpi_send_recv(&hm, &hr);
3060 
3061 	memcpy(an_log_gain, hr.u.c.an_log_value,
3062 		sizeof(short) * HPI_MAX_CHANNELS);
3063 	return hr.error;
3064 }
3065 
3066 u16 hpi_volume_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3067 	short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
3068 {
3069 	struct hpi_message hm;
3070 	struct hpi_response hr;
3071 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3072 		HPI_CONTROL_GET_STATE);
3073 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3074 	hm.u.c.attribute = HPI_VOLUME_RANGE;
3075 
3076 	hpi_send_recv(&hm, &hr);
3077 	if (hr.error) {
3078 		hr.u.c.an_log_value[0] = 0;
3079 		hr.u.c.an_log_value[1] = 0;
3080 		hr.u.c.param1 = 0;
3081 	}
3082 	if (min_gain_01dB)
3083 		*min_gain_01dB = hr.u.c.an_log_value[0];
3084 	if (max_gain_01dB)
3085 		*max_gain_01dB = hr.u.c.an_log_value[1];
3086 	if (step_gain_01dB)
3087 		*step_gain_01dB = (short)hr.u.c.param1;
3088 	return hr.error;
3089 }
3090 
3091 u16 hpi_volume_auto_fade_profile(const struct hpi_hsubsys *ph_subsys,
3092 	u32 h_control, short an_stop_gain0_01dB[HPI_MAX_CHANNELS],
3093 	u32 duration_ms, u16 profile)
3094 {
3095 	struct hpi_message hm;
3096 	struct hpi_response hr;
3097 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3098 		HPI_CONTROL_SET_STATE);
3099 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3100 
3101 	memcpy(hm.u.c.an_log_value, an_stop_gain0_01dB,
3102 		sizeof(short) * HPI_MAX_CHANNELS);
3103 
3104 	hm.u.c.attribute = HPI_VOLUME_AUTOFADE;
3105 	hm.u.c.param1 = duration_ms;
3106 	hm.u.c.param2 = profile;
3107 
3108 	hpi_send_recv(&hm, &hr);
3109 
3110 	return hr.error;
3111 }
3112 
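/*
 * Convenience wrapper around hpi_volume_auto_fade_profile() that
 * always uses the logarithmic fade profile.
 */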
3113 u16 hpi_volume_auto_fade(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3114 	short an_stop_gain0_01dB[HPI_MAX_CHANNELS], u32 duration_ms)
3115 {
3116 	return hpi_volume_auto_fade_profile(ph_subsys, h_control,
3117 		an_stop_gain0_01dB, duration_ms, HPI_VOLUME_AUTOFADE_LOG);
3118 }
3119 
3120 u16 hpi_vox_set_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3121 	short an_gain0_01dB)
3122 {
3123 	struct hpi_message hm;
3124 	struct hpi_response hr;
3125 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3126 		HPI_CONTROL_SET_STATE);
3127 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3128 	hm.u.c.attribute = HPI_VOX_THRESHOLD;
3129 
3130 	hm.u.c.an_log_value[0] = an_gain0_01dB;
3131 
3132 	hpi_send_recv(&hm, &hr);
3133 
3134 	return hr.error;
3135 }
3136 
3137 u16 hpi_vox_get_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3138 	short *an_gain0_01dB)
3139 {
3140 	struct hpi_message hm;
3141 	struct hpi_response hr;
3142 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3143 		HPI_CONTROL_GET_STATE);
3144 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3145 	hm.u.c.attribute = HPI_VOX_THRESHOLD;
3146 
3147 	hpi_send_recv(&hm, &hr);
3148 
3149 	*an_gain0_01dB = hr.u.c.an_log_value[0];
3150 
3151 	return hr.error;
3152 }
3153 
3154 static size_t strv_packet_size = MIN_STRV_PACKET_SIZE;
3155 
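/*
 * entity_type_to_size[] gives the per-item payload size for each
 * e_entity_type and is indexed directly by the type value, so its
 * order is assumed to match the enum declaration in the header.
 */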
3156 static size_t entity_type_to_size[LAST_ENTITY_TYPE] = {
3157 	0,
3158 	sizeof(struct hpi_entity),
3159 	sizeof(void *),
3160 
3161 	sizeof(int),
3162 	sizeof(float),
3163 	sizeof(double),
3164 
3165 	sizeof(char),
3166 	sizeof(char),
3167 
3168 	4 * sizeof(char),
3169 	16 * sizeof(char),
3170 	6 * sizeof(char),
3171 };
3172 
3173 inline size_t hpi_entity_size(struct hpi_entity *entity_ptr)
3174 {
3175 	return entity_ptr->header.size;
3176 }
3177 
3178 inline size_t hpi_entity_header_size(struct hpi_entity *entity_ptr)
3179 {
3180 	return sizeof(entity_ptr->header);
3181 }
3182 
3183 inline size_t hpi_entity_value_size(struct hpi_entity *entity_ptr)
3184 {
3185 	return hpi_entity_size(entity_ptr) -
3186 		hpi_entity_header_size(entity_ptr);
3187 }
3188 
3189 inline size_t hpi_entity_item_count(struct hpi_entity *entity_ptr)
3190 {
3191 	return hpi_entity_value_size(entity_ptr) /
3192 		entity_type_to_size[entity_ptr->header.type];
3193 }
3194 
3195 inline struct hpi_entity *hpi_entity_ptr_to_next(struct hpi_entity
3196 	*entity_ptr)
3197 {
3198 	return (void *)(((uint8_t *) entity_ptr) +
3199 		hpi_entity_size(entity_ptr));
3200 }
3201 
3202 inline u16 hpi_entity_check_type(const enum e_entity_type t)
3203 {
3204 	if (t >= 0 && t < STR_TYPE_FIELD_MAX)
3205 		return 0;
3206 	return HPI_ERROR_ENTITY_TYPE_INVALID;
3207 }
3208 
3209 inline u16 hpi_entity_check_role(const enum e_entity_role r)
3210 {
3211 	if (r >= 0 && r < STR_ROLE_FIELD_MAX)
3212 		return 0;
3213 	return HPI_ERROR_ENTITY_ROLE_INVALID;
3214 }
3215 
3216 static u16 hpi_entity_get_next(struct hpi_entity *entity, int recursive_flag,
3217 	void *guard_p, struct hpi_entity **next)
3218 {
3219 	HPI_DEBUG_ASSERT(entity != NULL);
3220 	HPI_DEBUG_ASSERT(next != NULL);
3221 	HPI_DEBUG_ASSERT(hpi_entity_size(entity) != 0);
3222 
3223 	if (guard_p <= (void *)entity) {
3224 		*next = NULL;
3225 		return 0;
3226 	}
3227 
3228 	if (recursive_flag && entity->header.type == entity_type_sequence)
3229 		*next = (struct hpi_entity *)entity->value;
3230 	else
3231 		*next = (struct hpi_entity *)hpi_entity_ptr_to_next(entity);
3232 
3233 	if (guard_p <= (void *)*next) {
3234 		*next = NULL;
3235 		return 0;
3236 	}
3237 
3238 	HPI_DEBUG_ASSERT(guard_p >= (void *)hpi_entity_ptr_to_next(*next));
3239 	return 0;
3240 }
3241 
3242 u16 hpi_entity_find_next(struct hpi_entity *container_entity,
3243 	enum e_entity_type type, enum e_entity_role role, int recursive_flag,
3244 	struct hpi_entity **current_match)
3245 {
3246 	struct hpi_entity *tmp = NULL;
3247 	void *guard_p = NULL;
3248 
3249 	HPI_DEBUG_ASSERT(container_entity != NULL);
3250 	guard_p = hpi_entity_ptr_to_next(container_entity);
3251 
3252 	if (*current_match != NULL)
3253 		hpi_entity_get_next(*current_match, recursive_flag, guard_p,
3254 			&tmp);
3255 	else
3256 		hpi_entity_get_next(container_entity, 1, guard_p, &tmp);
3257 
3258 	while (tmp) {
3259 		u16 err;
3260 
3261 		HPI_DEBUG_ASSERT((void *)tmp >= (void *)container_entity);
3262 
3263 		if ((!type || tmp->header.type == type) && (!role
3264 				|| tmp->header.role == role)) {
3265 			*current_match = tmp;
3266 			return 0;
3267 		}
3268 
3269 		err = hpi_entity_get_next(tmp, recursive_flag, guard_p,
3270 			current_match);
3271 		if (err)
3272 			return err;
3273 
3274 		tmp = *current_match;
3275 	}
3276 
3277 	*current_match = NULL;
3278 	return 0;
3279 }
3280 
3281 void hpi_entity_free(struct hpi_entity *entity)
3282 {
3283 	kfree(entity);
3284 }
3285 
3286 static u16 hpi_entity_alloc_and_copy(struct hpi_entity *src,
3287 	struct hpi_entity **dst)
3288 {
3289 	size_t buf_size;
3290 	HPI_DEBUG_ASSERT(dst != NULL);
3291 	HPI_DEBUG_ASSERT(src != NULL);
3292 
3293 	buf_size = hpi_entity_size(src);
3294 	*dst = kmalloc(buf_size, GFP_KERNEL);
3295 	if (*dst == NULL)
3296 		return HPI_ERROR_MEMORY_ALLOC;
3297 	memcpy(*dst, src, buf_size);
3298 	return 0;
3299 }
3300 
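/*
 * Fetch control info as a variable-size STRV entity. If the adapter
 * reports HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL, the required size is
 * taken from specific_error and the request is retried once with a
 * bigger response buffer; on success the returned entity is copied
 * into a freshly allocated buffer for the caller.
 */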
3301 u16 hpi_universal_info(const struct hpi_hsubsys *ph_subsys, u32 hC,
3302 	struct hpi_entity **info)
3303 {
3304 	struct hpi_msg_strv hm;
3305 	struct hpi_res_strv *phr;
3306 	u16 hpi_err;
3307 	int remaining_attempts = 2;
3308 	size_t resp_packet_size = 1024;
3309 
3310 	*info = NULL;
3311 
3312 	while (remaining_attempts--) {
		phr = kmalloc(resp_packet_size, GFP_KERNEL);
		if (!phr)
			return HPI_ERROR_MEMORY_ALLOC;
3315 
3316 		hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3317 			(u16)resp_packet_size, HPI_OBJ_CONTROL,
3318 			HPI_CONTROL_GET_INFO);
3319 		u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3320 
3321 		hm.strv.header.size = sizeof(hm.strv);
3322 		phr->strv.header.size = resp_packet_size - sizeof(phr->h);
3323 
3324 		hpi_send_recv((struct hpi_message *)&hm.h,
3325 			(struct hpi_response *)&phr->h);
3326 		if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3327 
3328 			HPI_DEBUG_ASSERT(phr->h.specific_error >
3329 				MIN_STRV_PACKET_SIZE
3330 				&& phr->h.specific_error < 1500);
3331 			resp_packet_size = phr->h.specific_error;
3332 		} else {
3333 			remaining_attempts = 0;
3334 			if (!phr->h.error)
3335 				hpi_entity_alloc_and_copy(&phr->strv, info);
3336 		}
3337 
3338 		hpi_err = phr->h.error;
3339 		kfree(phr);
3340 	}
3341 
3342 	return hpi_err;
3343 }
3344 
3345 u16 hpi_universal_get(const struct hpi_hsubsys *ph_subsys, u32 hC,
3346 	struct hpi_entity **value)
3347 {
3348 	struct hpi_msg_strv hm;
3349 	struct hpi_res_strv *phr;
3350 	u16 hpi_err;
3351 	int remaining_attempts = 2;
3352 
3353 	*value = NULL;
3354 
3355 	while (remaining_attempts--) {
3356 		phr = kmalloc(strv_packet_size, GFP_KERNEL);
3357 		if (!phr)
3358 			return HPI_ERROR_MEMORY_ALLOC;
3359 
3360 		hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3361 			(u16)strv_packet_size, HPI_OBJ_CONTROL,
3362 			HPI_CONTROL_GET_STATE);
3363 		u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3364 
3365 		hm.strv.header.size = sizeof(hm.strv);
3366 		phr->strv.header.size = strv_packet_size - sizeof(phr->h);
3367 
3368 		hpi_send_recv((struct hpi_message *)&hm.h,
3369 			(struct hpi_response *)&phr->h);
3370 		if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3371 
3372 			HPI_DEBUG_ASSERT(phr->h.specific_error >
3373 				MIN_STRV_PACKET_SIZE
3374 				&& phr->h.specific_error < 1000);
3375 			strv_packet_size = phr->h.specific_error;
3376 		} else {
3377 			remaining_attempts = 0;
3378 			if (!phr->h.error)
3379 				hpi_entity_alloc_and_copy(&phr->strv, value);
3380 		}
3381 
3382 		hpi_err = phr->h.error;
3383 		kfree(phr);
3384 	}
3385 
3386 	return hpi_err;
3387 }
3388 
3389 u16 hpi_universal_set(const struct hpi_hsubsys *ph_subsys, u32 hC,
3390 	struct hpi_entity *value)
3391 {
3392 	struct hpi_msg_strv *phm;
3393 	struct hpi_res_strv hr;
3394 
	phm = kmalloc(sizeof(phm->h) + value->header.size, GFP_KERNEL);
	if (!phm)
		return HPI_ERROR_MEMORY_ALLOC;
3397 
3398 	hpi_init_message_responseV1(&phm->h,
3399 		sizeof(phm->h) + value->header.size, &hr.h, sizeof(hr),
3400 		HPI_OBJ_CONTROL, HPI_CONTROL_SET_STATE);
3401 	u32TOINDEXES(hC, &phm->h.adapter_index, &phm->h.obj_index);
3402 	hr.strv.header.size = sizeof(hr.strv);
3403 
	memcpy(&phm->strv, value, value->header.size);
	hpi_send_recv((struct hpi_message *)&phm->h,
		(struct hpi_response *)&hr.h);

	kfree(phm);
	return hr.h.error;
3409 }
3410 
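/*
 * Note that hpi_entity_header_size(*entity) is evaluated below while
 * *entity is still NULL; this is safe because the helper only takes
 * sizeof() of the header member and never dereferences the pointer at
 * run time.
 */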
3411 u16 hpi_entity_alloc_and_pack(const enum e_entity_type type,
3412 	const size_t item_count, const enum e_entity_role role, void *value,
3413 	struct hpi_entity **entity)
3414 {
3415 	size_t bytes_to_copy, total_size;
3416 	u16 hE = 0;
3417 	*entity = NULL;
3418 
3419 	hE = hpi_entity_check_type(type);
3420 	if (hE)
3421 		return hE;
3422 
3423 	HPI_DEBUG_ASSERT(role > entity_role_null && type < LAST_ENTITY_TYPE);
3424 
3425 	bytes_to_copy = entity_type_to_size[type] * item_count;
3426 	total_size = hpi_entity_header_size(*entity) + bytes_to_copy;
3427 
3428 	HPI_DEBUG_ASSERT(total_size >= hpi_entity_header_size(*entity)
3429 		&& total_size < STR_SIZE_FIELD_MAX);
3430 
3431 	*entity = kmalloc(total_size, GFP_KERNEL);
3432 	if (*entity == NULL)
3433 		return HPI_ERROR_MEMORY_ALLOC;
3434 	memcpy((*entity)->value, value, bytes_to_copy);
	(*entity)->header.size = total_size;
3437 	(*entity)->header.type = type;
3438 	(*entity)->header.role = role;
3439 	return 0;
3440 }
3441 
3442 u16 hpi_entity_copy_value_from(struct hpi_entity *entity,
3443 	enum e_entity_type type, size_t item_count, void *value_dst_p)
3444 {
3445 	size_t bytes_to_copy;
3446 
3447 	if (entity->header.type != type)
3448 		return HPI_ERROR_ENTITY_TYPE_MISMATCH;
3449 
3450 	if (hpi_entity_item_count(entity) != item_count)
3451 		return HPI_ERROR_ENTITY_ITEM_COUNT;
3452 
3453 	bytes_to_copy = entity_type_to_size[type] * item_count;
3454 	memcpy(value_dst_p, entity->value, bytes_to_copy);
3455 	return 0;
3456 }
3457 
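/*
 * For a sequence entity the item count is obtained by walking the
 * contained sub-entities up to the guard pointer; for all other types
 * it is the value size divided by the per-type element size.
 */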
3458 u16 hpi_entity_unpack(struct hpi_entity *entity, enum e_entity_type *type,
3459 	size_t *item_count, enum e_entity_role *role, void **value)
3460 {
3461 	u16 err = 0;
3462 	HPI_DEBUG_ASSERT(entity != NULL);
3463 
3464 	if (type)
3465 		*type = entity->header.type;
3466 
3467 	if (role)
3468 		*role = entity->header.role;
3469 
3470 	if (value)
3471 		*value = entity->value;
3472 
3473 	if (item_count != NULL) {
3474 		if (entity->header.type == entity_type_sequence) {
3475 			void *guard_p = hpi_entity_ptr_to_next(entity);
3476 			struct hpi_entity *next = NULL;
3477 			void *contents = entity->value;
3478 
3479 			*item_count = 0;
3480 			while (contents < guard_p) {
3481 				(*item_count)++;
3482 				err = hpi_entity_get_next(contents, 0,
3483 					guard_p, &next);
3484 				if (next == NULL || err)
3485 					break;
3486 				contents = next;
3487 			}
3488 		} else {
3489 			*item_count = hpi_entity_item_count(entity);
3490 		}
3491 	}
3492 	return err;
3493 }
3494 
3495 u16 hpi_gpio_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3496 	u32 *ph_gpio, u16 *pw_number_input_bits, u16 *pw_number_output_bits)
3497 {
3498 	struct hpi_message hm;
3499 	struct hpi_response hr;
3500 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_OPEN);
3501 	hm.adapter_index = adapter_index;
3502 
3503 	hpi_send_recv(&hm, &hr);
3504 
3505 	if (hr.error == 0) {
3506 		*ph_gpio =
3507 			hpi_indexes_to_handle(HPI_OBJ_GPIO, adapter_index, 0);
3508 		if (pw_number_input_bits)
3509 			*pw_number_input_bits = hr.u.l.number_input_bits;
3510 		if (pw_number_output_bits)
3511 			*pw_number_output_bits = hr.u.l.number_output_bits;
3512 	} else
3513 		*ph_gpio = 0;
3514 	return hr.error;
3515 }
3516 
3517 u16 hpi_gpio_read_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3518 	u16 bit_index, u16 *pw_bit_data)
3519 {
3520 	struct hpi_message hm;
3521 	struct hpi_response hr;
3522 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_BIT);
3523 	u32TOINDEX(h_gpio, &hm.adapter_index);
3524 	hm.u.l.bit_index = bit_index;
3525 
3526 	hpi_send_recv(&hm, &hr);
3527 
3528 	*pw_bit_data = hr.u.l.bit_data[0];
3529 	return hr.error;
3530 }
3531 
3532 u16 hpi_gpio_read_all_bits(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3533 	u16 aw_all_bit_data[4]
3534 	)
3535 {
3536 	struct hpi_message hm;
3537 	struct hpi_response hr;
3538 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_ALL);
3539 	u32TOINDEX(h_gpio, &hm.adapter_index);
3540 
3541 	hpi_send_recv(&hm, &hr);
3542 
3543 	if (aw_all_bit_data) {
3544 		aw_all_bit_data[0] = hr.u.l.bit_data[0];
3545 		aw_all_bit_data[1] = hr.u.l.bit_data[1];
3546 		aw_all_bit_data[2] = hr.u.l.bit_data[2];
3547 		aw_all_bit_data[3] = hr.u.l.bit_data[3];
3548 	}
3549 	return hr.error;
3550 }
3551 
3552 u16 hpi_gpio_write_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3553 	u16 bit_index, u16 bit_data)
3554 {
3555 	struct hpi_message hm;
3556 	struct hpi_response hr;
3557 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_WRITE_BIT);
3558 	u32TOINDEX(h_gpio, &hm.adapter_index);
3559 	hm.u.l.bit_index = bit_index;
3560 	hm.u.l.bit_data = bit_data;
3561 
3562 	hpi_send_recv(&hm, &hr);
3563 
3564 	return hr.error;
3565 }
3566 
3567 u16 hpi_gpio_write_status(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3568 	u16 aw_all_bit_data[4]
3569 	)
3570 {
3571 	struct hpi_message hm;
3572 	struct hpi_response hr;
3573 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO,
3574 		HPI_GPIO_WRITE_STATUS);
3575 	u32TOINDEX(h_gpio, &hm.adapter_index);
3576 
3577 	hpi_send_recv(&hm, &hr);
3578 
3579 	if (aw_all_bit_data) {
3580 		aw_all_bit_data[0] = hr.u.l.bit_data[0];
3581 		aw_all_bit_data[1] = hr.u.l.bit_data[1];
3582 		aw_all_bit_data[2] = hr.u.l.bit_data[2];
3583 		aw_all_bit_data[3] = hr.u.l.bit_data[3];
3584 	}
3585 	return hr.error;
3586 }
3587 
3588 u16 hpi_async_event_open(const struct hpi_hsubsys *ph_subsys,
3589 	u16 adapter_index, u32 *ph_async)
3590 {
3591 	struct hpi_message hm;
3592 	struct hpi_response hr;
3593 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3594 		HPI_ASYNCEVENT_OPEN);
3595 	hm.adapter_index = adapter_index;
3596 
3597 	hpi_send_recv(&hm, &hr);
3598 
3599 	if (hr.error == 0)
3601 		*ph_async =
3602 			hpi_indexes_to_handle(HPI_OBJ_ASYNCEVENT,
3603 			adapter_index, 0);
3604 	else
3605 		*ph_async = 0;
3606 	return hr.error;
3607 
3608 }
3609 
3610 u16 hpi_async_event_close(const struct hpi_hsubsys *ph_subsys, u32 h_async)
3611 {
3612 	struct hpi_message hm;
3613 	struct hpi_response hr;
3614 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
		HPI_ASYNCEVENT_CLOSE);
3616 	u32TOINDEX(h_async, &hm.adapter_index);
3617 
3618 	hpi_send_recv(&hm, &hr);
3619 
3620 	return hr.error;
3621 }
3622 
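/*
 * Waiting for asynchronous events is not implemented; this is a stub
 * that returns success without blocking or reporting any events.
 */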
3623 u16 hpi_async_event_wait(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3624 	u16 maximum_events, struct hpi_async_event *p_events,
3625 	u16 *pw_number_returned)
3626 {
3627 	return 0;
3628 }
3629 
3630 u16 hpi_async_event_get_count(const struct hpi_hsubsys *ph_subsys,
3631 	u32 h_async, u16 *pw_count)
3632 {
3633 	struct hpi_message hm;
3634 	struct hpi_response hr;
3635 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3636 		HPI_ASYNCEVENT_GETCOUNT);
3637 	u32TOINDEX(h_async, &hm.adapter_index);
3638 
3639 	hpi_send_recv(&hm, &hr);
3640 
3641 	if (hr.error == 0)
3642 		if (pw_count)
3643 			*pw_count = hr.u.as.u.count.count;
3644 
3645 	return hr.error;
3646 }
3647 
3648 u16 hpi_async_event_get(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3649 	u16 maximum_events, struct hpi_async_event *p_events,
3650 	u16 *pw_number_returned)
3651 {
3652 	struct hpi_message hm;
3653 	struct hpi_response hr;
3654 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3655 		HPI_ASYNCEVENT_GET);
3656 	u32TOINDEX(h_async, &hm.adapter_index);
3657 
3658 	hpi_send_recv(&hm, &hr);
3659 	if (!hr.error) {
3660 		memcpy(p_events, &hr.u.as.u.event,
3661 			sizeof(struct hpi_async_event));
3662 		*pw_number_returned = 1;
3663 	}
3664 
3665 	return hr.error;
3666 }
3667 
3668 u16 hpi_nv_memory_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3669 	u32 *ph_nv_memory, u16 *pw_size_in_bytes)
3670 {
3671 	struct hpi_message hm;
3672 	struct hpi_response hr;
3673 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3674 		HPI_NVMEMORY_OPEN);
3675 	hm.adapter_index = adapter_index;
3676 
3677 	hpi_send_recv(&hm, &hr);
3678 
3679 	if (hr.error == 0) {
3680 		*ph_nv_memory =
3681 			hpi_indexes_to_handle(HPI_OBJ_NVMEMORY, adapter_index,
3682 			0);
3683 		if (pw_size_in_bytes)
3684 			*pw_size_in_bytes = hr.u.n.size_in_bytes;
3685 	} else
3686 		*ph_nv_memory = 0;
3687 	return hr.error;
3688 }
3689 
3690 u16 hpi_nv_memory_read_byte(const struct hpi_hsubsys *ph_subsys,
3691 	u32 h_nv_memory, u16 index, u16 *pw_data)
3692 {
3693 	struct hpi_message hm;
3694 	struct hpi_response hr;
3695 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3696 		HPI_NVMEMORY_READ_BYTE);
3697 	u32TOINDEX(h_nv_memory, &hm.adapter_index);
3698 	hm.u.n.address = index;
3699 
3700 	hpi_send_recv(&hm, &hr);
3701 
3702 	*pw_data = hr.u.n.data;
3703 	return hr.error;
3704 }
3705 
3706 u16 hpi_nv_memory_write_byte(const struct hpi_hsubsys *ph_subsys,
3707 	u32 h_nv_memory, u16 index, u16 data)
3708 {
3709 	struct hpi_message hm;
3710 	struct hpi_response hr;
3711 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3712 		HPI_NVMEMORY_WRITE_BYTE);
3713 	u32TOINDEX(h_nv_memory, &hm.adapter_index);
3714 	hm.u.n.address = index;
3715 	hm.u.n.data = data;
3716 
3717 	hpi_send_recv(&hm, &hr);
3718 
3719 	return hr.error;
3720 }
3721 
u16 hpi_profile_open_all(const struct hpi_hsubsys *ph_subsys,
	u16 adapter_index, u16 profile_index, u32 *ph_profile,
	u16 *pw_max_profiles)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
		HPI_PROFILE_OPEN_ALL);
	hm.adapter_index = adapter_index;
	hm.obj_index = profile_index;
	hpi_send_recv(&hm, &hr);

	*pw_max_profiles = hr.u.p.u.o.max_profiles;
	if (hr.error == 0)
		*ph_profile =
			hpi_indexes_to_handle(HPI_OBJ_PROFILE, adapter_index,
			profile_index);
	else
		*ph_profile = 0;
	return hr.error;
}

u16 hpi_profile_get(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
	u16 bin_index, u16 *pw_seconds, u32 *pmicro_seconds, u32 *pcall_count,
	u32 *pmax_micro_seconds, u32 *pmin_micro_seconds)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE, HPI_PROFILE_GET);
	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
	hm.u.p.bin_index = bin_index;
	hpi_send_recv(&hm, &hr);
	if (pw_seconds)
		*pw_seconds = hr.u.p.u.t.seconds;
	if (pmicro_seconds)
		*pmicro_seconds = hr.u.p.u.t.micro_seconds;
	if (pcall_count)
		*pcall_count = hr.u.p.u.t.call_count;
	if (pmax_micro_seconds)
		*pmax_micro_seconds = hr.u.p.u.t.max_micro_seconds;
	if (pmin_micro_seconds)
		*pmin_micro_seconds = hr.u.p.u.t.min_micro_seconds;
	return hr.error;
}

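/*
 * Note: hpi_profile_get_utilization() below reports the value carried in
 * the call_count field of the profile time response; that field is
 * apparently reused to carry the utilization figure for this message.
 */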
u16 hpi_profile_get_utilization(const struct hpi_hsubsys *ph_subsys,
	u32 h_profile, u32 *putilization)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
		HPI_PROFILE_GET_UTILIZATION);
	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
	hpi_send_recv(&hm, &hr);
	if (hr.error) {
		if (putilization)
			*putilization = 0;
	} else {
		if (putilization)
			*putilization = hr.u.p.u.t.call_count;
	}
	return hr.error;
}

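/*
 * Note: on success hpi_profile_get_name() copies exactly name_length bytes
 * into sz_name without guaranteeing NUL termination, and on error it writes
 * the literal "??", so the buffer must be at least name_length bytes long
 * and no smaller than 3 bytes.
 */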
u16 hpi_profile_get_name(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
	u16 bin_index, char *sz_name, u16 name_length)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
		HPI_PROFILE_GET_NAME);
	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
	hm.u.p.bin_index = bin_index;
	hpi_send_recv(&hm, &hr);
	if (hr.error) {
		if (sz_name)
			strcpy(sz_name, "??");
	} else {
		if (sz_name)
			memcpy(sz_name, (char *)hr.u.p.u.n.sz_name,
				name_length);
	}
	return hr.error;
}

u16 hpi_profile_start_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
		HPI_PROFILE_START_ALL);
	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
	hpi_send_recv(&hm, &hr);

	return hr.error;
}

u16 hpi_profile_stop_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
		HPI_PROFILE_STOP_ALL);
	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
	hpi_send_recv(&hm, &hr);

	return hr.error;
}

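/*
 * Usage sketch (illustrative only, not part of the driver): collecting
 * profiling data with the helpers above.  "ss", adapter index 0, profile
 * index 0 and bin index 0 are assumptions for the example.
 *
 *	u32 h_profile, micro_seconds, calls;
 *	u16 max_profiles, seconds, err;
 *
 *	err = hpi_profile_open_all(ss, 0, 0, &h_profile, &max_profiles);
 *	if (!err) {
 *		hpi_profile_start_all(ss, h_profile);
 *		... run the workload being profiled ...
 *		hpi_profile_stop_all(ss, h_profile);
 *		err = hpi_profile_get(ss, h_profile, 0, &seconds,
 *			&micro_seconds, &calls, NULL, NULL);
 *	}
 */
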
u16 hpi_watchdog_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
	u32 *ph_watchdog)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
		HPI_WATCHDOG_OPEN);
	hm.adapter_index = adapter_index;

	hpi_send_recv(&hm, &hr);

	if (hr.error == 0)
		*ph_watchdog =
			hpi_indexes_to_handle(HPI_OBJ_WATCHDOG, adapter_index,
			0);
	else
		*ph_watchdog = 0;
	return hr.error;
}

u16 hpi_watchdog_set_time(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog,
	u32 time_millisec)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
		HPI_WATCHDOG_SET_TIME);
	u32TOINDEX(h_watchdog, &hm.adapter_index);
	hm.u.w.time_ms = time_millisec;

	hpi_send_recv(&hm, &hr);

	return hr.error;
}

u16 hpi_watchdog_ping(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog)
{
	struct hpi_message hm;
	struct hpi_response hr;
	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
		HPI_WATCHDOG_PING);
	u32TOINDEX(h_watchdog, &hm.adapter_index);

	hpi_send_recv(&hm, &hr);

	return hr.error;
}

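/*
 * Usage sketch (illustrative only, not part of the driver): arming the
 * adapter watchdog and keeping it alive.  "ss", adapter index 0 and the
 * 2000 ms timeout are assumptions for the example; as with any watchdog,
 * the handle presumably has to be pinged more often than the programmed
 * timeout once it is set.
 *
 *	u32 h_watchdog;
 *	u16 err;
 *
 *	err = hpi_watchdog_open(ss, 0, &h_watchdog);
 *	if (!err)
 *		err = hpi_watchdog_set_time(ss, h_watchdog, 2000);
 *	while (!err) {
 *		... do the periodic work ...
 *		err = hpi_watchdog_ping(ss, h_watchdog);
 *	}
 */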