ALSA: Add support of AudioScience ASI boards
sound/pci/asihpi/hpifunc.c
1
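/* Hardware Programming Interface (HPI) utility functions: thin wrappers that
 * pack their arguments into an hpi_message, send it with hpi_send_recv() and
 * unpack the resulting hpi_response.
 */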
2 #include "hpi_internal.h"
3 #include "hpimsginit.h"
4
5 #include "hpidebug.h"
6
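/* HPI object handles pack an object index (12 bits), object type (4 bits),
 * adapter index (14 bits) and spare/read-only flags into one 32 bit word.
 * The helpers below convert between handles and (adapter, object) indexes.
 */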
7 struct hpi_handle {
8 unsigned int obj_index:12;
9 unsigned int obj_type:4;
10 unsigned int adapter_index:14;
11 unsigned int spare:1;
12 unsigned int read_only:1;
13 };
14
15 union handle_word {
16 struct hpi_handle h;
17 u32 w;
18 };
19
20 u32 hpi_indexes_to_handle(const char c_object, const u16 adapter_index,
21 const u16 object_index)
22 {
23 union handle_word handle;
24
25 handle.h.adapter_index = adapter_index;
26 handle.h.spare = 0;
27 handle.h.read_only = 0;
28 handle.h.obj_type = c_object;
29 handle.h.obj_index = object_index;
30 return handle.w;
31 }
32
33 void hpi_handle_to_indexes(const u32 handle, u16 *pw_adapter_index,
34 u16 *pw_object_index)
35 {
36 union handle_word uhandle;
37 uhandle.w = handle;
38
39 if (pw_adapter_index)
40 *pw_adapter_index = (u16)uhandle.h.adapter_index;
41 if (pw_object_index)
42 *pw_object_index = (u16)uhandle.h.obj_index;
43 }
44
45 char hpi_handle_object(const u32 handle)
46 {
47 union handle_word uhandle;
48 uhandle.w = handle;
49 return (char)uhandle.h.obj_type;
50 }
51
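/* Decode a handle into the message's adapter/object indexes. A zero handle
 * makes the calling function return HPI_ERROR_INVALID_OBJ.
 */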
52 #define u32TOINDEX(h, i1) \
53 do {\
54 if (h == 0) \
55 return HPI_ERROR_INVALID_OBJ; \
56 else \
57 hpi_handle_to_indexes(h, i1, NULL); \
58 } while (0)
59
60 #define u32TOINDEXES(h, i1, i2) \
61 do {\
62 if (h == 0) \
63 return HPI_ERROR_INVALID_OBJ; \
64 else \
65 hpi_handle_to_indexes(h, i1, i2);\
66 } while (0)
67
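/* Convert between the public hpi_format and the packed hpi_msg_format
 * carried inside messages; the message-to-format direction also clears the
 * legacy/unused fields.
 */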
68 void hpi_format_to_msg(struct hpi_msg_format *pMF,
69 const struct hpi_format *pF)
70 {
71 pMF->sample_rate = pF->sample_rate;
72 pMF->bit_rate = pF->bit_rate;
73 pMF->attributes = pF->attributes;
74 pMF->channels = pF->channels;
75 pMF->format = pF->format;
76 }
77
78 static void hpi_msg_to_format(struct hpi_format *pF,
79 struct hpi_msg_format *pMF)
80 {
81 pF->sample_rate = pMF->sample_rate;
82 pF->bit_rate = pMF->bit_rate;
83 pF->attributes = pMF->attributes;
84 pF->channels = pMF->channels;
85 pF->format = pMF->format;
86 pF->mode_legacy = 0;
87 pF->unused = 0;
88 }
89
90 void hpi_stream_response_to_legacy(struct hpi_stream_res *pSR)
91 {
92 pSR->u.legacy_stream_info.auxiliary_data_available =
93 pSR->u.stream_info.auxiliary_data_available;
94 pSR->u.legacy_stream_info.state = pSR->u.stream_info.state;
95 }
96
97 static struct hpi_hsubsys gh_subsys;
98
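/* Open the HPI subsystem and return the single static hpi_hsubsys instance,
 * or NULL if HPI_SUBSYS_OPEN fails.
 */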
99 struct hpi_hsubsys *hpi_subsys_create(void)
101 {
102 struct hpi_message hm;
103 struct hpi_response hr;
104
105 memset(&gh_subsys, 0, sizeof(struct hpi_hsubsys));
106
108 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
109 HPI_SUBSYS_OPEN);
110 hpi_send_recv(&hm, &hr);
111
112 if (hr.error == 0)
113 return &gh_subsys;
116 return NULL;
117 }
118
119 void hpi_subsys_free(const struct hpi_hsubsys *ph_subsys)
120 {
121 struct hpi_message hm;
122 struct hpi_response hr;
123
124 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
125 HPI_SUBSYS_CLOSE);
126 hpi_send_recv(&hm, &hr);
127
128 }
129
130 u16 hpi_subsys_get_version(const struct hpi_hsubsys *ph_subsys, u32 *pversion)
131 {
132 struct hpi_message hm;
133 struct hpi_response hr;
134
135 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
136 HPI_SUBSYS_GET_VERSION);
137 hpi_send_recv(&hm, &hr);
138 *pversion = hr.u.s.version;
139 return hr.error;
140 }
141
142 u16 hpi_subsys_get_version_ex(const struct hpi_hsubsys *ph_subsys,
143 u32 *pversion_ex)
144 {
145 struct hpi_message hm;
146 struct hpi_response hr;
147
148 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
149 HPI_SUBSYS_GET_VERSION);
150 hpi_send_recv(&hm, &hr);
151 *pversion_ex = hr.u.s.data;
152 return hr.error;
153 }
154
155 u16 hpi_subsys_get_info(const struct hpi_hsubsys *ph_subsys, u32 *pversion,
156 u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
157 {
158 struct hpi_message hm;
159 struct hpi_response hr;
160 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
161 HPI_SUBSYS_GET_INFO);
162
163 hpi_send_recv(&hm, &hr);
164
165 *pversion = hr.u.s.version;
166 if (list_length > HPI_MAX_ADAPTERS)
167 memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
168 HPI_MAX_ADAPTERS * sizeof(u16));
169 else
170 memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list, list_length * sizeof(u16));
171 *pw_num_adapters = hr.u.s.num_adapters;
172 return hr.error;
173 }
174
175 u16 hpi_subsys_find_adapters(const struct hpi_hsubsys *ph_subsys,
176 u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
177 {
178 struct hpi_message hm;
179 struct hpi_response hr;
180 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
181 HPI_SUBSYS_FIND_ADAPTERS);
182
183 hpi_send_recv(&hm, &hr);
184
185 if (list_length > HPI_MAX_ADAPTERS) {
186 memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
187 HPI_MAX_ADAPTERS * sizeof(u16));
188 memset(&aw_adapter_list[HPI_MAX_ADAPTERS], 0,
189 (list_length - HPI_MAX_ADAPTERS) * sizeof(u16));
190 } else
191 memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
192 list_length * sizeof(u16));
193 *pw_num_adapters = hr.u.s.num_adapters;
194
195 return hr.error;
196 }
197
198 u16 hpi_subsys_create_adapter(const struct hpi_hsubsys *ph_subsys,
199 const struct hpi_resource *p_resource, u16 *pw_adapter_index)
200 {
201 struct hpi_message hm;
202 struct hpi_response hr;
203
204 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
205 HPI_SUBSYS_CREATE_ADAPTER);
206 hm.u.s.resource = *p_resource;
207
208 hpi_send_recv(&hm, &hr);
209
210 *pw_adapter_index = hr.u.s.adapter_index;
211 return hr.error;
212 }
213
214 u16 hpi_subsys_delete_adapter(const struct hpi_hsubsys *ph_subsys,
215 u16 adapter_index)
216 {
217 struct hpi_message hm;
218 struct hpi_response hr;
219 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
220 HPI_SUBSYS_DELETE_ADAPTER);
221 hm.adapter_index = adapter_index;
222 hpi_send_recv(&hm, &hr);
223 return hr.error;
224 }
225
226 u16 hpi_subsys_get_num_adapters(const struct hpi_hsubsys *ph_subsys,
227 int *pn_num_adapters)
228 {
229 struct hpi_message hm;
230 struct hpi_response hr;
231 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
232 HPI_SUBSYS_GET_NUM_ADAPTERS);
233 hpi_send_recv(&hm, &hr);
234 *pn_num_adapters = (int)hr.u.s.num_adapters;
235 return hr.error;
236 }
237
238 u16 hpi_subsys_get_adapter(const struct hpi_hsubsys *ph_subsys, int iterator,
239 u32 *padapter_index, u16 *pw_adapter_type)
240 {
241 struct hpi_message hm;
242 struct hpi_response hr;
243 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
244 HPI_SUBSYS_GET_ADAPTER);
245 hm.adapter_index = (u16)iterator;
246 hpi_send_recv(&hm, &hr);
247 *padapter_index = hr.u.s.adapter_index;
248 *pw_adapter_type = hr.u.s.aw_adapter_list[0];
249 return hr.error;
250 }
251
252 u16 hpi_subsys_set_host_network_interface(const struct hpi_hsubsys *ph_subsys,
253 const char *sz_interface)
254 {
255 struct hpi_message hm;
256 struct hpi_response hr;
257 hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
258 HPI_SUBSYS_SET_NETWORK_INTERFACE);
259 if (sz_interface == NULL)
260 return HPI_ERROR_INVALID_RESOURCE;
261 hm.u.s.resource.r.net_if = sz_interface;
262 hpi_send_recv(&hm, &hr);
263 return hr.error;
264 }
265
266 u16 hpi_adapter_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
267 {
268 struct hpi_message hm;
269 struct hpi_response hr;
270 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
271 HPI_ADAPTER_OPEN);
272 hm.adapter_index = adapter_index;
273
274 hpi_send_recv(&hm, &hr);
275
276 return hr.error;
277
278 }
279
280 u16 hpi_adapter_close(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
281 {
282 struct hpi_message hm;
283 struct hpi_response hr;
284 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
285 HPI_ADAPTER_CLOSE);
286 hm.adapter_index = adapter_index;
287
288 hpi_send_recv(&hm, &hr);
289
290 return hr.error;
291 }
292
293 u16 hpi_adapter_set_mode(const struct hpi_hsubsys *ph_subsys,
294 u16 adapter_index, u32 adapter_mode)
295 {
296 return hpi_adapter_set_mode_ex(ph_subsys, adapter_index, adapter_mode,
297 HPI_ADAPTER_MODE_SET);
298 }
299
300 u16 hpi_adapter_set_mode_ex(const struct hpi_hsubsys *ph_subsys,
301 u16 adapter_index, u32 adapter_mode, u16 query_or_set)
302 {
303 struct hpi_message hm;
304 struct hpi_response hr;
305 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
306 HPI_ADAPTER_SET_MODE);
307 hm.adapter_index = adapter_index;
308 hm.u.a.adapter_mode = adapter_mode;
309 hm.u.a.assert_id = query_or_set;
310 hpi_send_recv(&hm, &hr);
311 return hr.error;
312 }
313
314 u16 hpi_adapter_get_mode(const struct hpi_hsubsys *ph_subsys,
315 u16 adapter_index, u32 *padapter_mode)
316 {
317 struct hpi_message hm;
318 struct hpi_response hr;
319 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
320 HPI_ADAPTER_GET_MODE);
321 hm.adapter_index = adapter_index;
322 hpi_send_recv(&hm, &hr);
323 if (padapter_mode)
324 *padapter_mode = hr.u.a.serial_number;
325 return hr.error;
326 }
327
328 u16 hpi_adapter_get_info(const struct hpi_hsubsys *ph_subsys,
329 u16 adapter_index, u16 *pw_num_outstreams, u16 *pw_num_instreams,
330 u16 *pw_version, u32 *pserial_number, u16 *pw_adapter_type)
331 {
332 struct hpi_message hm;
333 struct hpi_response hr;
334 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
335 HPI_ADAPTER_GET_INFO);
336 hm.adapter_index = adapter_index;
337
338 hpi_send_recv(&hm, &hr);
339
340 *pw_adapter_type = hr.u.a.adapter_type;
341 *pw_num_outstreams = hr.u.a.num_outstreams;
342 *pw_num_instreams = hr.u.a.num_instreams;
343 *pw_version = hr.u.a.version;
344 *pserial_number = hr.u.a.serial_number;
345 return hr.error;
346 }
347
348 u16 hpi_adapter_get_module_by_index(const struct hpi_hsubsys *ph_subsys,
349 u16 adapter_index, u16 module_index, u16 *pw_num_outputs,
350 u16 *pw_num_inputs, u16 *pw_version, u32 *pserial_number,
351 u16 *pw_module_type, u32 *ph_module)
352 {
353 struct hpi_message hm;
354 struct hpi_response hr;
355
356 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
357 HPI_ADAPTER_MODULE_INFO);
358 hm.adapter_index = adapter_index;
359 hm.u.ax.module_info.index = module_index;
360
361 hpi_send_recv(&hm, &hr);
362
363 *pw_module_type = hr.u.a.adapter_type;
364 *pw_num_outputs = hr.u.a.num_outstreams;
365 *pw_num_inputs = hr.u.a.num_instreams;
366 *pw_version = hr.u.a.version;
367 *pserial_number = hr.u.a.serial_number;
368 *ph_module = 0;
369
370 return hr.error;
371 }
372
373 u16 hpi_adapter_get_assert(const struct hpi_hsubsys *ph_subsys,
374 u16 adapter_index, u16 *assert_present, char *psz_assert,
375 u16 *pw_line_number)
376 {
377 struct hpi_message hm;
378 struct hpi_response hr;
379 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
380 HPI_ADAPTER_GET_ASSERT);
381 hm.adapter_index = adapter_index;
382 hpi_send_recv(&hm, &hr);
383
384 *assert_present = 0;
385
386 if (!hr.error) {
387
388 *pw_line_number = (u16)hr.u.a.serial_number;
389 if (*pw_line_number) {
390
391 int i;
392 char *src = (char *)hr.u.a.sz_adapter_assert;
393 char *dst = psz_assert;
394
395 *assert_present = 1;
396
397 for (i = 0; i < HPI_STRING_LEN; i++) {
398 char c;
399 c = *src++;
400 *dst++ = c;
401 if (c == 0)
402 break;
403 }
404
405 }
406 }
407 return hr.error;
408 }
409
410 u16 hpi_adapter_get_assert_ex(const struct hpi_hsubsys *ph_subsys,
411 u16 adapter_index, u16 *assert_present, char *psz_assert,
412 u32 *pline_number, u16 *pw_assert_on_dsp)
413 {
414 struct hpi_message hm;
415 struct hpi_response hr;
416 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
417 HPI_ADAPTER_GET_ASSERT);
418 hm.adapter_index = adapter_index;
419
420 hpi_send_recv(&hm, &hr);
421
422 *assert_present = 0;
423
424 if (!hr.error) {
425
426 *pline_number = hr.u.a.serial_number;
427
428 *assert_present = hr.u.a.adapter_type;
429
430 *pw_assert_on_dsp = hr.u.a.adapter_index;
431
432 if (!*assert_present && *pline_number)
433
434 *assert_present = 1;
435
436 if (*assert_present) {
437
438 int i;
439 char *src = (char *)hr.u.a.sz_adapter_assert;
440 char *dst = psz_assert;
441
442 for (i = 0; i < HPI_STRING_LEN; i++) {
443 char c;
444 c = *src++;
445 *dst++ = c;
446 if (c == 0)
447 break;
448 }
449
450 } else {
451 *psz_assert = 0;
452 }
453 }
454 return hr.error;
455 }
456
457 u16 hpi_adapter_test_assert(const struct hpi_hsubsys *ph_subsys,
458 u16 adapter_index, u16 assert_id)
459 {
460 struct hpi_message hm;
461 struct hpi_response hr;
462 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
463 HPI_ADAPTER_TEST_ASSERT);
464 hm.adapter_index = adapter_index;
465 hm.u.a.assert_id = assert_id;
466
467 hpi_send_recv(&hm, &hr);
468
469 return hr.error;
470 }
471
472 u16 hpi_adapter_enable_capability(const struct hpi_hsubsys *ph_subsys,
473 u16 adapter_index, u16 capability, u32 key)
474 {
475 struct hpi_message hm;
476 struct hpi_response hr;
477 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
478 HPI_ADAPTER_ENABLE_CAPABILITY);
479 hm.adapter_index = adapter_index;
480 hm.u.a.assert_id = capability;
481 hm.u.a.adapter_mode = key;
482
483 hpi_send_recv(&hm, &hr);
484
485 return hr.error;
486 }
487
488 u16 hpi_adapter_self_test(const struct hpi_hsubsys *ph_subsys,
489 u16 adapter_index)
490 {
491 struct hpi_message hm;
492 struct hpi_response hr;
493 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
494 HPI_ADAPTER_SELFTEST);
495 hm.adapter_index = adapter_index;
496 hpi_send_recv(&hm, &hr);
497 return hr.error;
498 }
499
500 u16 hpi_adapter_debug_read(const struct hpi_hsubsys *ph_subsys,
501 u16 adapter_index, u32 dsp_address, char *p_buffer, int *count_bytes)
502 {
503 struct hpi_message hm;
504 struct hpi_response hr;
505 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
506 HPI_ADAPTER_DEBUG_READ);
507
508 hr.size = sizeof(hr);
509
510 hm.adapter_index = adapter_index;
511 hm.u.ax.debug_read.dsp_address = dsp_address;
512
513 if (*count_bytes > sizeof(hr.u.bytes))
514 *count_bytes = sizeof(hr.u.bytes);
515
516 hm.u.ax.debug_read.count_bytes = *count_bytes;
517
518 hpi_send_recv(&hm, &hr);
519
520 if (!hr.error) {
521 *count_bytes = hr.size - 12;
522 memcpy(p_buffer, &hr.u.bytes, *count_bytes);
523 } else
524 *count_bytes = 0;
525 return hr.error;
526 }
527
528 u16 hpi_adapter_set_property(const struct hpi_hsubsys *ph_subsys,
529 u16 adapter_index, u16 property, u16 parameter1, u16 parameter2)
530 {
531 struct hpi_message hm;
532 struct hpi_response hr;
533 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
534 HPI_ADAPTER_SET_PROPERTY);
535 hm.adapter_index = adapter_index;
536 hm.u.ax.property_set.property = property;
537 hm.u.ax.property_set.parameter1 = parameter1;
538 hm.u.ax.property_set.parameter2 = parameter2;
539
540 hpi_send_recv(&hm, &hr);
541
542 return hr.error;
543 }
544
545 u16 hpi_adapter_get_property(const struct hpi_hsubsys *ph_subsys,
546 u16 adapter_index, u16 property, u16 *pw_parameter1,
547 u16 *pw_parameter2)
548 {
549 struct hpi_message hm;
550 struct hpi_response hr;
551 hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
552 HPI_ADAPTER_GET_PROPERTY);
553 hm.adapter_index = adapter_index;
554 hm.u.ax.property_set.property = property;
555
556 hpi_send_recv(&hm, &hr);
557 if (!hr.error) {
558 if (pw_parameter1)
559 *pw_parameter1 = hr.u.ax.property_get.parameter1;
560 if (pw_parameter2)
561 *pw_parameter2 = hr.u.ax.property_get.parameter2;
562 }
563
564 return hr.error;
565 }
566
567 u16 hpi_adapter_enumerate_property(const struct hpi_hsubsys *ph_subsys,
568 u16 adapter_index, u16 index, u16 what_to_enumerate,
569 u16 property_index, u32 *psetting)
570 {
571 return 0;
572 }
573
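/* Fill in an hpi_format after validating the channel count, format enum and
 * sample rate. Out-of-range sample rates and MPEG layer 2 attributes are
 * clamped to legal values; for PCM formats bit_rate is derived from the
 * sample rate and channel count, for MPEG it is taken from the caller.
 */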
574 u16 hpi_format_create(struct hpi_format *p_format, u16 channels, u16 format,
575 u32 sample_rate, u32 bit_rate, u32 attributes)
576 {
577 u16 error = 0;
578 struct hpi_msg_format fmt;
579
580 switch (channels) {
581 case 1:
582 case 2:
583 case 4:
584 case 6:
585 case 8:
586 case 16:
587 break;
588 default:
589 error = HPI_ERROR_INVALID_CHANNELS;
590 return error;
591 }
592 fmt.channels = channels;
593
594 switch (format) {
595 case HPI_FORMAT_PCM16_SIGNED:
596 case HPI_FORMAT_PCM24_SIGNED:
597 case HPI_FORMAT_PCM32_SIGNED:
598 case HPI_FORMAT_PCM32_FLOAT:
599 case HPI_FORMAT_PCM16_BIGENDIAN:
600 case HPI_FORMAT_PCM8_UNSIGNED:
601 case HPI_FORMAT_MPEG_L1:
602 case HPI_FORMAT_MPEG_L2:
603 case HPI_FORMAT_MPEG_L3:
604 case HPI_FORMAT_DOLBY_AC2:
605 case HPI_FORMAT_AA_TAGIT1_HITS:
606 case HPI_FORMAT_AA_TAGIT1_INSERTS:
607 case HPI_FORMAT_RAW_BITSTREAM:
608 case HPI_FORMAT_AA_TAGIT1_HITS_EX1:
609 case HPI_FORMAT_OEM1:
610 case HPI_FORMAT_OEM2:
611 break;
612 default:
613 error = HPI_ERROR_INVALID_FORMAT;
614 return error;
615 }
616 fmt.format = format;
617
618 if (sample_rate < 8000L) {
619 error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
620 sample_rate = 8000L;
621 }
622 if (sample_rate > 200000L) {
623 error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
624 sample_rate = 200000L;
625 }
626 fmt.sample_rate = sample_rate;
627
628 switch (format) {
629 case HPI_FORMAT_MPEG_L1:
630 case HPI_FORMAT_MPEG_L2:
631 case HPI_FORMAT_MPEG_L3:
632 fmt.bit_rate = bit_rate;
633 break;
634 case HPI_FORMAT_PCM16_SIGNED:
635 case HPI_FORMAT_PCM16_BIGENDIAN:
636 fmt.bit_rate = channels * sample_rate * 2;
637 break;
638 case HPI_FORMAT_PCM32_SIGNED:
639 case HPI_FORMAT_PCM32_FLOAT:
640 fmt.bit_rate = channels * sample_rate * 4;
641 break;
642 case HPI_FORMAT_PCM8_UNSIGNED:
643 fmt.bit_rate = channels * sample_rate;
644 break;
645 default:
646 fmt.bit_rate = 0;
647 }
648
649 switch (format) {
650 case HPI_FORMAT_MPEG_L2:
651 if ((channels == 1)
652 && (attributes != HPI_MPEG_MODE_DEFAULT)) {
653 attributes = HPI_MPEG_MODE_DEFAULT;
654 error = HPI_ERROR_INVALID_FORMAT;
655 } else if (attributes > HPI_MPEG_MODE_DUALCHANNEL) {
656 attributes = HPI_MPEG_MODE_DEFAULT;
657 error = HPI_ERROR_INVALID_FORMAT;
658 }
659 fmt.attributes = attributes;
660 break;
661 default:
662 fmt.attributes = attributes;
663 }
664
665 hpi_msg_to_format(p_format, &fmt);
666 return error;
667 }
668
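/* Estimate a host buffer size: twice the data produced per host polling
 * period, rounded up to a 4 KiB multiple and then to the next power of two.
 */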
669 u16 hpi_stream_estimate_buffer_size(struct hpi_format *p_format,
670 u32 host_polling_rate_in_milli_seconds, u32 *recommended_buffer_size)
671 {
672
673 u32 bytes_per_second;
674 u32 size;
675 u16 channels;
676 struct hpi_format *pF = p_format;
677
678 channels = pF->channels;
679
680 switch (pF->format) {
681 case HPI_FORMAT_PCM16_BIGENDIAN:
682 case HPI_FORMAT_PCM16_SIGNED:
683 bytes_per_second = pF->sample_rate * 2L * channels;
684 break;
685 case HPI_FORMAT_PCM24_SIGNED:
686 bytes_per_second = pF->sample_rate * 3L * channels;
687 break;
688 case HPI_FORMAT_PCM32_SIGNED:
689 case HPI_FORMAT_PCM32_FLOAT:
690 bytes_per_second = pF->sample_rate * 4L * channels;
691 break;
692 case HPI_FORMAT_PCM8_UNSIGNED:
693 bytes_per_second = pF->sample_rate * 1L * channels;
694 break;
695 case HPI_FORMAT_MPEG_L1:
696 case HPI_FORMAT_MPEG_L2:
697 case HPI_FORMAT_MPEG_L3:
698 bytes_per_second = pF->bit_rate / 8L;
699 break;
700 case HPI_FORMAT_DOLBY_AC2:
701
702 bytes_per_second = 256000L / 8L;
703 break;
704 default:
705 return HPI_ERROR_INVALID_FORMAT;
706 }
707 size = (bytes_per_second * host_polling_rate_in_milli_seconds * 2) /
708 1000L;
709
710 *recommended_buffer_size =
711 roundup_pow_of_two(((size + 4095L) & ~4095L));
712 return 0;
713 }
714
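/* Outstream (playback) functions. hpi_outstream_open() returns a packed
 * handle in *ph_outstream, or 0 on failure.
 */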
715 u16 hpi_outstream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
716 u16 outstream_index, u32 *ph_outstream)
717 {
718 struct hpi_message hm;
719 struct hpi_response hr;
720 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
721 HPI_OSTREAM_OPEN);
722 hm.adapter_index = adapter_index;
723 hm.obj_index = outstream_index;
724
725 hpi_send_recv(&hm, &hr);
726
727 if (hr.error == 0)
728 *ph_outstream =
729 hpi_indexes_to_handle(HPI_OBJ_OSTREAM, adapter_index,
730 outstream_index);
731 else
732 *ph_outstream = 0;
733 return hr.error;
734 }
735
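/* Closing an outstream first frees any host buffer and resets its stream
 * group before sending HPI_OSTREAM_CLOSE.
 */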
736 u16 hpi_outstream_close(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
737 {
738 struct hpi_message hm;
739 struct hpi_response hr;
740
741 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
742 HPI_OSTREAM_HOSTBUFFER_FREE);
743 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
744 hpi_send_recv(&hm, &hr);
745
746 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
747 HPI_OSTREAM_GROUP_RESET);
748 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
749 hpi_send_recv(&hm, &hr);
750
751 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
752 HPI_OSTREAM_CLOSE);
753 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
754 hpi_send_recv(&hm, &hr);
755
756 return hr.error;
757 }
758
759 u16 hpi_outstream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
760 u32 h_outstream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_to_play,
761 u32 *psamples_played, u32 *pauxiliary_data_to_play)
762 {
763 struct hpi_message hm;
764 struct hpi_response hr;
765 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
766 HPI_OSTREAM_GET_INFO);
767 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
768
769 hpi_send_recv(&hm, &hr);
770
771 if (pw_state)
772 *pw_state = hr.u.d.u.stream_info.state;
773 if (pbuffer_size)
774 *pbuffer_size = hr.u.d.u.stream_info.buffer_size;
775 if (pdata_to_play)
776 *pdata_to_play = hr.u.d.u.stream_info.data_available;
777 if (psamples_played)
778 *psamples_played = hr.u.d.u.stream_info.samples_transferred;
779 if (pauxiliary_data_to_play)
780 *pauxiliary_data_to_play =
781 hr.u.d.u.stream_info.auxiliary_data_available;
782 return hr.error;
783 }
784
785 u16 hpi_outstream_write_buf(const struct hpi_hsubsys *ph_subsys,
786 u32 h_outstream, const u8 *pb_data, u32 bytes_to_write,
787 const struct hpi_format *p_format)
788 {
789 struct hpi_message hm;
790 struct hpi_response hr;
791 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
792 HPI_OSTREAM_WRITE);
793 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
794 hm.u.d.u.data.pb_data = (u8 *)pb_data;
795 hm.u.d.u.data.data_size = bytes_to_write;
796
797 hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
798
799 hpi_send_recv(&hm, &hr);
800
801 return hr.error;
802 }
803
804 u16 hpi_outstream_start(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
805 {
806 struct hpi_message hm;
807 struct hpi_response hr;
808 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
809 HPI_OSTREAM_START);
810 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
811
812 hpi_send_recv(&hm, &hr);
813
814 return hr.error;
815 }
816
817 u16 hpi_outstream_wait_start(const struct hpi_hsubsys *ph_subsys,
818 u32 h_outstream)
819 {
820 struct hpi_message hm;
821 struct hpi_response hr;
822 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
823 HPI_OSTREAM_WAIT_START);
824 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
825
826 hpi_send_recv(&hm, &hr);
827
828 return hr.error;
829 }
830
831 u16 hpi_outstream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
832 {
833 struct hpi_message hm;
834 struct hpi_response hr;
835 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
836 HPI_OSTREAM_STOP);
837 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
838
839 hpi_send_recv(&hm, &hr);
840
841 return hr.error;
842 }
843
844 u16 hpi_outstream_sinegen(const struct hpi_hsubsys *ph_subsys,
845 u32 h_outstream)
846 {
847 struct hpi_message hm;
848 struct hpi_response hr;
849 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
850 HPI_OSTREAM_SINEGEN);
851 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
852
853 hpi_send_recv(&hm, &hr);
854
855 return hr.error;
856 }
857
858 u16 hpi_outstream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
859 {
860 struct hpi_message hm;
861 struct hpi_response hr;
862 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
863 HPI_OSTREAM_RESET);
864 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
865
866 hpi_send_recv(&hm, &hr);
867
868 return hr.error;
869 }
870
871 u16 hpi_outstream_query_format(const struct hpi_hsubsys *ph_subsys,
872 u32 h_outstream, struct hpi_format *p_format)
873 {
874 struct hpi_message hm;
875 struct hpi_response hr;
876
877 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
878 HPI_OSTREAM_QUERY_FORMAT);
879 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
880
881 hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
882
883 hpi_send_recv(&hm, &hr);
884
885 return hr.error;
886 }
887
888 u16 hpi_outstream_set_format(const struct hpi_hsubsys *ph_subsys,
889 u32 h_outstream, struct hpi_format *p_format)
890 {
891 struct hpi_message hm;
892 struct hpi_response hr;
893
894 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
895 HPI_OSTREAM_SET_FORMAT);
896 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
897
898 hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
899
900 hpi_send_recv(&hm, &hr);
901
902 return hr.error;
903 }
904
905 u16 hpi_outstream_set_velocity(const struct hpi_hsubsys *ph_subsys,
906 u32 h_outstream, short velocity)
907 {
908 struct hpi_message hm;
909 struct hpi_response hr;
910
911 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
912 HPI_OSTREAM_SET_VELOCITY);
913 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
914 hm.u.d.u.velocity = velocity;
915
916 hpi_send_recv(&hm, &hr);
917
918 return hr.error;
919 }
920
921 u16 hpi_outstream_set_punch_in_out(const struct hpi_hsubsys *ph_subsys,
922 u32 h_outstream, u32 punch_in_sample, u32 punch_out_sample)
923 {
924 struct hpi_message hm;
925 struct hpi_response hr;
926
927 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
928 HPI_OSTREAM_SET_PUNCHINOUT);
929 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
930
931 hm.u.d.u.pio.punch_in_sample = punch_in_sample;
932 hm.u.d.u.pio.punch_out_sample = punch_out_sample;
933
934 hpi_send_recv(&hm, &hr);
935
936 return hr.error;
937 }
938
939 u16 hpi_outstream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
940 u32 h_outstream, u16 mode)
941 {
942 struct hpi_message hm;
943 struct hpi_response hr;
944
945 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
946 HPI_OSTREAM_ANC_RESET);
947 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
948 hm.u.d.u.data.format.channels = mode;
949 hpi_send_recv(&hm, &hr);
950 return hr.error;
951 }
952
953 u16 hpi_outstream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
954 u32 h_outstream, u32 *pframes_available)
955 {
956 struct hpi_message hm;
957 struct hpi_response hr;
958
959 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
960 HPI_OSTREAM_ANC_GET_INFO);
961 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
962 hpi_send_recv(&hm, &hr);
963 if (hr.error == 0) {
964 if (pframes_available)
965 *pframes_available =
966 hr.u.d.u.stream_info.data_available /
967 sizeof(struct hpi_anc_frame);
968 }
969 return hr.error;
970 }
971
972 u16 hpi_outstream_ancillary_read(const struct hpi_hsubsys *ph_subsys,
973 u32 h_outstream, struct hpi_anc_frame *p_anc_frame_buffer,
974 u32 anc_frame_buffer_size_in_bytes,
975 u32 number_of_ancillary_frames_to_read)
976 {
977 struct hpi_message hm;
978 struct hpi_response hr;
979 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
980 HPI_OSTREAM_ANC_READ);
981 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
982 hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
983 hm.u.d.u.data.data_size =
984 number_of_ancillary_frames_to_read *
985 sizeof(struct hpi_anc_frame);
986 if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
987 hpi_send_recv(&hm, &hr);
988 else
989 hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
990 return hr.error;
991 }
992
993 u16 hpi_outstream_set_time_scale(const struct hpi_hsubsys *ph_subsys,
994 u32 h_outstream, u32 time_scale)
995 {
996 struct hpi_message hm;
997 struct hpi_response hr;
998
999 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1000 HPI_OSTREAM_SET_TIMESCALE);
1001 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1002
1003 hm.u.d.u.time_scale = time_scale;
1004
1005 hpi_send_recv(&hm, &hr);
1006
1007 return hr.error;
1008 }
1009
1010 u16 hpi_outstream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1011 u32 h_outstream, u32 size_in_bytes)
1012 {
1013 struct hpi_message hm;
1014 struct hpi_response hr;
1015
1016 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1017 HPI_OSTREAM_HOSTBUFFER_ALLOC);
1018 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1019 hm.u.d.u.data.data_size = size_in_bytes;
1020 hpi_send_recv(&hm, &hr);
1021 return hr.error;
1022 }
1023
1024 u16 hpi_outstream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1025 u32 h_outstream, u8 **pp_buffer,
1026 struct hpi_hostbuffer_status **pp_status)
1027 {
1028 struct hpi_message hm;
1029 struct hpi_response hr;
1030
1031 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1032 HPI_OSTREAM_HOSTBUFFER_GET_INFO);
1033 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1034 hpi_send_recv(&hm, &hr);
1035
1036 if (hr.error == 0) {
1037 if (pp_buffer)
1038 *pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1039 if (pp_status)
1040 *pp_status = hr.u.d.u.hostbuffer_info.p_status;
1041 }
1042 return hr.error;
1043 }
1044
1045 u16 hpi_outstream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1046 u32 h_outstream)
1047 {
1048 struct hpi_message hm;
1049 struct hpi_response hr;
1050
1051 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1052 HPI_OSTREAM_HOSTBUFFER_FREE);
1053 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1054 hpi_send_recv(&hm, &hr);
1055 return hr.error;
1056 }
1057
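/* Add another outstream or instream to this outstream's group. The stream
 * handle is decoded to verify both streams are on the same adapter;
 * inter-adapter groups are rejected.
 */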
1058 u16 hpi_outstream_group_add(const struct hpi_hsubsys *ph_subsys,
1059 u32 h_outstream, u32 h_stream)
1060 {
1061 struct hpi_message hm;
1062 struct hpi_response hr;
1063 u16 adapter;
1064 char c_obj_type;
1065
1066 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1067 HPI_OSTREAM_GROUP_ADD);
1068 hr.error = 0;
1069 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1070 c_obj_type = hpi_handle_object(h_stream);
1071 switch (c_obj_type) {
1072 case HPI_OBJ_OSTREAM:
1073 hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1074 u32TOINDEXES(h_stream, &adapter,
1075 &hm.u.d.u.stream.stream_index);
1076 break;
1077 case HPI_OBJ_ISTREAM:
1078 hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1079 u32TOINDEXES(h_stream, &adapter,
1080 &hm.u.d.u.stream.stream_index);
1081 break;
1082 default:
1083 return HPI_ERROR_INVALID_STREAM;
1084 }
1085 if (adapter != hm.adapter_index)
1086 return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1087
1088 hpi_send_recv(&hm, &hr);
1089 return hr.error;
1090 }
1091
1092 u16 hpi_outstream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1093 u32 h_outstream, u32 *poutstream_map, u32 *pinstream_map)
1094 {
1095 struct hpi_message hm;
1096 struct hpi_response hr;
1097
1098 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1099 HPI_OSTREAM_GROUP_GETMAP);
1100 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1101 hpi_send_recv(&hm, &hr);
1102
1103 if (poutstream_map)
1104 *poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1105 if (pinstream_map)
1106 *pinstream_map = hr.u.d.u.group_info.instream_group_map;
1107
1108 return hr.error;
1109 }
1110
1111 u16 hpi_outstream_group_reset(const struct hpi_hsubsys *ph_subsys,
1112 u32 h_outstream)
1113 {
1114 struct hpi_message hm;
1115 struct hpi_response hr;
1116
1117 hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1118 HPI_OSTREAM_GROUP_RESET);
1119 u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1120 hpi_send_recv(&hm, &hr);
1121 return hr.error;
1122 }
1123
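/* Instream (record) functions; these mirror the outstream calls above. */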
1124 u16 hpi_instream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1125 u16 instream_index, u32 *ph_instream)
1126 {
1127 struct hpi_message hm;
1128 struct hpi_response hr;
1129
1130 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1131 HPI_ISTREAM_OPEN);
1132 hm.adapter_index = adapter_index;
1133 hm.obj_index = instream_index;
1134
1135 hpi_send_recv(&hm, &hr);
1136
1137 if (hr.error == 0)
1138 *ph_instream =
1139 hpi_indexes_to_handle(HPI_OBJ_ISTREAM, adapter_index,
1140 instream_index);
1141 else
1142 *ph_instream = 0;
1143
1144 return hr.error;
1145 }
1146
1147 u16 hpi_instream_close(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1148 {
1149 struct hpi_message hm;
1150 struct hpi_response hr;
1151
1152 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1153 HPI_ISTREAM_HOSTBUFFER_FREE);
1154 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1155 hpi_send_recv(&hm, &hr);
1156
1157 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1158 HPI_ISTREAM_GROUP_RESET);
1159 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1160 hpi_send_recv(&hm, &hr);
1161
1162 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1163 HPI_ISTREAM_CLOSE);
1164 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1165 hpi_send_recv(&hm, &hr);
1166
1167 return hr.error;
1168 }
1169
1170 u16 hpi_instream_query_format(const struct hpi_hsubsys *ph_subsys,
1171 u32 h_instream, const struct hpi_format *p_format)
1172 {
1173 struct hpi_message hm;
1174 struct hpi_response hr;
1175
1176 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1177 HPI_ISTREAM_QUERY_FORMAT);
1178 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1179 hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1180
1181 hpi_send_recv(&hm, &hr);
1182
1183 return hr.error;
1184 }
1185
1186 u16 hpi_instream_set_format(const struct hpi_hsubsys *ph_subsys,
1187 u32 h_instream, const struct hpi_format *p_format)
1188 {
1189 struct hpi_message hm;
1190 struct hpi_response hr;
1191
1192 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1193 HPI_ISTREAM_SET_FORMAT);
1194 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1195 hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1196
1197 hpi_send_recv(&hm, &hr);
1198
1199 return hr.error;
1200 }
1201
1202 u16 hpi_instream_read_buf(const struct hpi_hsubsys *ph_subsys, u32 h_instream,
1203 u8 *pb_data, u32 bytes_to_read)
1204 {
1205 struct hpi_message hm;
1206 struct hpi_response hr;
1207
1208 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1209 HPI_ISTREAM_READ);
1210 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1211 hm.u.d.u.data.data_size = bytes_to_read;
1212 hm.u.d.u.data.pb_data = pb_data;
1213
1214 hpi_send_recv(&hm, &hr);
1215
1216 return hr.error;
1217 }
1218
1219 u16 hpi_instream_start(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1220 {
1221 struct hpi_message hm;
1222 struct hpi_response hr;
1223
1224 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1225 HPI_ISTREAM_START);
1226 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1227
1228 hpi_send_recv(&hm, &hr);
1229
1230 return hr.error;
1231 }
1232
1233 u16 hpi_instream_wait_start(const struct hpi_hsubsys *ph_subsys,
1234 u32 h_instream)
1235 {
1236 struct hpi_message hm;
1237 struct hpi_response hr;
1238
1239 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1240 HPI_ISTREAM_WAIT_START);
1241 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1242
1243 hpi_send_recv(&hm, &hr);
1244
1245 return hr.error;
1246 }
1247
1248 u16 hpi_instream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1249 {
1250 struct hpi_message hm;
1251 struct hpi_response hr;
1252
1253 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1254 HPI_ISTREAM_STOP);
1255 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1256
1257 hpi_send_recv(&hm, &hr);
1258
1259 return hr.error;
1260 }
1261
1262 u16 hpi_instream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1263 {
1264 struct hpi_message hm;
1265 struct hpi_response hr;
1266
1267 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1268 HPI_ISTREAM_RESET);
1269 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1270
1271 hpi_send_recv(&hm, &hr);
1272
1273 return hr.error;
1274 }
1275
1276 u16 hpi_instream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
1277 u32 h_instream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_recorded,
1278 u32 *psamples_recorded, u32 *pauxiliary_data_recorded)
1279 {
1280 struct hpi_message hm;
1281 struct hpi_response hr;
1282 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1283 HPI_ISTREAM_GET_INFO);
1284 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1285
1286 hpi_send_recv(&hm, &hr);
1287
1288 if (pw_state)
1289 *pw_state = hr.u.d.u.stream_info.state;
1290 if (pbuffer_size)
1291 *pbuffer_size = hr.u.d.u.stream_info.buffer_size;
1292 if (pdata_recorded)
1293 *pdata_recorded = hr.u.d.u.stream_info.data_available;
1294 if (psamples_recorded)
1295 *psamples_recorded = hr.u.d.u.stream_info.samples_transferred;
1296 if (pauxiliary_data_recorded)
1297 *pauxiliary_data_recorded =
1298 hr.u.d.u.stream_info.auxiliary_data_available;
1299 return hr.error;
1300 }
1301
1302 u16 hpi_instream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
1303 u32 h_instream, u16 bytes_per_frame, u16 mode, u16 alignment,
1304 u16 idle_bit)
1305 {
1306 struct hpi_message hm;
1307 struct hpi_response hr;
1308 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1309 HPI_ISTREAM_ANC_RESET);
1310 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1311 hm.u.d.u.data.format.attributes = bytes_per_frame;
1312 hm.u.d.u.data.format.format = (mode << 8) | (alignment & 0xff);
1313 hm.u.d.u.data.format.channels = idle_bit;
1314 hpi_send_recv(&hm, &hr);
1315 return hr.error;
1316 }
1317
1318 u16 hpi_instream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
1319 u32 h_instream, u32 *pframe_space)
1320 {
1321 struct hpi_message hm;
1322 struct hpi_response hr;
1323 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1324 HPI_ISTREAM_ANC_GET_INFO);
1325 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1326 hpi_send_recv(&hm, &hr);
1327 if (pframe_space)
1328 *pframe_space =
1329 (hr.u.d.u.stream_info.buffer_size -
1330 hr.u.d.u.stream_info.data_available) /
1331 sizeof(struct hpi_anc_frame);
1332 return hr.error;
1333 }
1334
1335 u16 hpi_instream_ancillary_write(const struct hpi_hsubsys *ph_subsys,
1336 u32 h_instream, const struct hpi_anc_frame *p_anc_frame_buffer,
1337 u32 anc_frame_buffer_size_in_bytes,
1338 u32 number_of_ancillary_frames_to_write)
1339 {
1340 struct hpi_message hm;
1341 struct hpi_response hr;
1342
1343 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1344 HPI_ISTREAM_ANC_WRITE);
1345 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1346 hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
1347 hm.u.d.u.data.data_size =
1348 number_of_ancillary_frames_to_write *
1349 sizeof(struct hpi_anc_frame);
1350 if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
1351 hpi_send_recv(&hm, &hr);
1352 else
1353 hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
1354 return hr.error;
1355 }
1356
1357 u16 hpi_instream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1358 u32 h_instream, u32 size_in_bytes)
1359 {
1360
1361 struct hpi_message hm;
1362 struct hpi_response hr;
1363
1364 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1365 HPI_ISTREAM_HOSTBUFFER_ALLOC);
1366 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1367 hm.u.d.u.data.data_size = size_in_bytes;
1368 hpi_send_recv(&hm, &hr);
1369 return hr.error;
1370 }
1371
1372 u16 hpi_instream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1373 u32 h_instream, u8 **pp_buffer,
1374 struct hpi_hostbuffer_status **pp_status)
1375 {
1376 struct hpi_message hm;
1377 struct hpi_response hr;
1378
1379 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1380 HPI_ISTREAM_HOSTBUFFER_GET_INFO);
1381 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1382 hpi_send_recv(&hm, &hr);
1383
1384 if (hr.error == 0) {
1385 if (pp_buffer)
1386 *pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1387 if (pp_status)
1388 *pp_status = hr.u.d.u.hostbuffer_info.p_status;
1389 }
1390 return hr.error;
1391 }
1392
1393 u16 hpi_instream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1394 u32 h_instream)
1395 {
1396
1397 struct hpi_message hm;
1398 struct hpi_response hr;
1399
1400 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1401 HPI_ISTREAM_HOSTBUFFER_FREE);
1402 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1403 hpi_send_recv(&hm, &hr);
1404 return hr.error;
1405 }
1406
1407 u16 hpi_instream_group_add(const struct hpi_hsubsys *ph_subsys,
1408 u32 h_instream, u32 h_stream)
1409 {
1410 struct hpi_message hm;
1411 struct hpi_response hr;
1412 u16 adapter;
1413 char c_obj_type;
1414
1415 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1416 HPI_ISTREAM_GROUP_ADD);
1417 hr.error = 0;
1418 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1419 c_obj_type = hpi_handle_object(h_stream);
1420
1421 switch (c_obj_type) {
1422 case HPI_OBJ_OSTREAM:
1423 hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1424 u32TOINDEXES(h_stream, &adapter,
1425 &hm.u.d.u.stream.stream_index);
1426 break;
1427 case HPI_OBJ_ISTREAM:
1428 hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1429 u32TOINDEXES(h_stream, &adapter,
1430 &hm.u.d.u.stream.stream_index);
1431 break;
1432 default:
1433 return HPI_ERROR_INVALID_STREAM;
1434 }
1435
1436 if (adapter != hm.adapter_index)
1437 return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1438
1439 hpi_send_recv(&hm, &hr);
1440 return hr.error;
1441 }
1442
1443 u16 hpi_instream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1444 u32 h_instream, u32 *poutstream_map, u32 *pinstream_map)
1445 {
1446 struct hpi_message hm;
1447 struct hpi_response hr;
1448
1449 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1450 HPI_ISTREAM_GROUP_GETMAP);
1451 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1452 hpi_send_recv(&hm, &hr);
1453
1454 if (poutstream_map)
1455 *poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1456 if (pinstream_map)
1457 *pinstream_map = hr.u.d.u.group_info.instream_group_map;
1458
1459 return hr.error;
1460 }
1461
1462 u16 hpi_instream_group_reset(const struct hpi_hsubsys *ph_subsys,
1463 u32 h_instream)
1464 {
1465 struct hpi_message hm;
1466 struct hpi_response hr;
1467
1468 hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1469 HPI_ISTREAM_GROUP_RESET);
1470 u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1471 hpi_send_recv(&hm, &hr);
1472 return hr.error;
1473 }
1474
1475 u16 hpi_mixer_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1476 u32 *ph_mixer)
1477 {
1478 struct hpi_message hm;
1479 struct hpi_response hr;
1480 hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_OPEN);
1481 hm.adapter_index = adapter_index;
1482
1483 hpi_send_recv(&hm, &hr);
1484
1485 if (hr.error == 0)
1486 *ph_mixer =
1487 hpi_indexes_to_handle(HPI_OBJ_MIXER, adapter_index,
1488 0);
1489 else
1490 *ph_mixer = 0;
1491 return hr.error;
1492 }
1493
1494 u16 hpi_mixer_close(const struct hpi_hsubsys *ph_subsys, u32 h_mixer)
1495 {
1496 struct hpi_message hm;
1497 struct hpi_response hr;
1498 hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_CLOSE);
1499 u32TOINDEX(h_mixer, &hm.adapter_index);
1500 hpi_send_recv(&hm, &hr);
1501 return hr.error;
1502 }
1503
1504 u16 hpi_mixer_get_control(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1505 u16 src_node_type, u16 src_node_type_index, u16 dst_node_type,
1506 u16 dst_node_type_index, u16 control_type, u32 *ph_control)
1507 {
1508 struct hpi_message hm;
1509 struct hpi_response hr;
1510 hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1511 HPI_MIXER_GET_CONTROL);
1512 u32TOINDEX(h_mixer, &hm.adapter_index);
1513 hm.u.m.node_type1 = src_node_type;
1514 hm.u.m.node_index1 = src_node_type_index;
1515 hm.u.m.node_type2 = dst_node_type;
1516 hm.u.m.node_index2 = dst_node_type_index;
1517 hm.u.m.control_type = control_type;
1518
1519 hpi_send_recv(&hm, &hr);
1520
1521 if (hr.error == 0)
1522 *ph_control =
1523 hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1524 hm.adapter_index, hr.u.m.control_index);
1525 else
1526 *ph_control = 0;
1527 return hr.error;
1528 }
1529
1530 u16 hpi_mixer_get_control_by_index(const struct hpi_hsubsys *ph_subsys,
1531 u32 h_mixer, u16 control_index, u16 *pw_src_node_type,
1532 u16 *pw_src_node_index, u16 *pw_dst_node_type, u16 *pw_dst_node_index,
1533 u16 *pw_control_type, u32 *ph_control)
1534 {
1535 struct hpi_message hm;
1536 struct hpi_response hr;
1537 hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1538 HPI_MIXER_GET_CONTROL_BY_INDEX);
1539 u32TOINDEX(h_mixer, &hm.adapter_index);
1540 hm.u.m.control_index = control_index;
1541 hpi_send_recv(&hm, &hr);
1542
1543 if (pw_src_node_type) {
1544 *pw_src_node_type =
1545 hr.u.m.src_node_type + HPI_SOURCENODE_NONE;
1546 *pw_src_node_index = hr.u.m.src_node_index;
1547 *pw_dst_node_type = hr.u.m.dst_node_type + HPI_DESTNODE_NONE;
1548 *pw_dst_node_index = hr.u.m.dst_node_index;
1549 }
1550 if (pw_control_type)
1551 *pw_control_type = hr.u.m.control_index;
1552
1553 if (ph_control) {
1554 if (hr.error == 0)
1555 *ph_control =
1556 hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1557 hm.adapter_index, control_index);
1558 else
1559 *ph_control = 0;
1560 }
1561 return hr.error;
1562 }
1563
1564 u16 hpi_mixer_store(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1565 enum HPI_MIXER_STORE_COMMAND command, u16 index)
1566 {
1567 struct hpi_message hm;
1568 struct hpi_response hr;
1569 hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_STORE);
1570 u32TOINDEX(h_mixer, &hm.adapter_index);
1571 hm.u.mx.store.command = command;
1572 hm.u.mx.store.index = index;
1573 hpi_send_recv(&hm, &hr);
1574 return hr.error;
1575 }
1576
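/* Generic helpers used by the control functions below: set or get up to two
 * 32 bit parameters of a control identified by its handle.
 */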
1577 static
1578 u16 hpi_control_param_set(const struct hpi_hsubsys *ph_subsys,
1579 const u32 h_control, const u16 attrib, const u32 param1,
1580 const u32 param2)
1581 {
1582 struct hpi_message hm;
1583 struct hpi_response hr;
1584 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1585 HPI_CONTROL_SET_STATE);
1586 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1587 hm.u.c.attribute = attrib;
1588 hm.u.c.param1 = param1;
1589 hm.u.c.param2 = param2;
1590 hpi_send_recv(&hm, &hr);
1591 return hr.error;
1592 }
1593
1594 static
1595 u16 hpi_control_param_get(const struct hpi_hsubsys *ph_subsys,
1596 const u32 h_control, const u16 attrib, u32 param1, u32 param2,
1597 u32 *pparam1, u32 *pparam2)
1598 {
1599 struct hpi_message hm;
1600 struct hpi_response hr;
1601 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1602 HPI_CONTROL_GET_STATE);
1603 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1604 hm.u.c.attribute = attrib;
1605 hm.u.c.param1 = param1;
1606 hm.u.c.param2 = param2;
1607 hpi_send_recv(&hm, &hr);
1608 if (pparam1)
1609 *pparam1 = hr.u.c.param1;
1610 if (pparam2)
1611 *pparam2 = hr.u.c.param2;
1612
1613 return hr.error;
1614 }
1615
1616 #define hpi_control_param1_get(s, h, a, p1) \
1617 hpi_control_param_get(s, h, a, 0, 0, p1, NULL)
1618 #define hpi_control_param2_get(s, h, a, p1, p2) \
1619 hpi_control_param_get(s, h, a, 0, 0, p1, p2)
1620 #define hpi_control_ex_param1_get(s, h, a, p1) \
1621 hpi_control_ex_param_get(s, h, a, 0, 0, p1, NULL)
1622 #define hpi_control_ex_param2_get(s, h, a, p1, p2) \
1623 hpi_control_ex_param_get(s, h, a, 0, 0, p1, p2)
1624
1625 static
1626 u16 hpi_control_query(const struct hpi_hsubsys *ph_subsys,
1627 const u32 h_control, const u16 attrib, const u32 index,
1628 const u32 param, u32 *psetting)
1629 {
1630 struct hpi_message hm;
1631 struct hpi_response hr;
1632 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1633 HPI_CONTROL_GET_INFO);
1634 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1635
1636 hm.u.c.attribute = attrib;
1637 hm.u.c.param1 = index;
1638 hm.u.c.param2 = param;
1639
1640 hpi_send_recv(&hm, &hr);
1641 *psetting = hr.u.c.param1;
1642
1643 return hr.error;
1644 }
1645
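/* Read a string attribute of a control eight characters per message,
 * stopping at the terminating NUL or returning
 * HPI_ERROR_INVALID_CONTROL_VALUE if the string does not fit the caller's
 * buffer.
 */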
1646 static u16 hpi_control_get_string(const struct hpi_hsubsys *ph_subsys,
1647 const u32 h_control, const u16 attribute, char *psz_string,
1648 const u32 string_length)
1649 {
1650 unsigned int sub_string_index = 0, j = 0;
1651 char c = 0;
1652 unsigned int n = 0;
1653 u16 hE = 0;
1654
1655 if ((string_length < 1) || (string_length > 256))
1656 return HPI_ERROR_INVALID_CONTROL_VALUE;
1657 for (sub_string_index = 0; sub_string_index < string_length;
1658 sub_string_index += 8) {
1659 struct hpi_message hm;
1660 struct hpi_response hr;
1661
1662 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1663 HPI_CONTROL_GET_STATE);
1664 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1665 hm.u.c.attribute = attribute;
1666 hm.u.c.param1 = sub_string_index;
1667 hm.u.c.param2 = 0;
1668 hpi_send_recv(&hm, &hr);
1669
1670 if (sub_string_index == 0
1671 && (hr.u.cu.chars8.remaining_chars + 8) >
1672 string_length)
1673 return HPI_ERROR_INVALID_CONTROL_VALUE;
1674
1675 if (hr.error) {
1676 hE = hr.error;
1677 break;
1678 }
1679 for (j = 0; j < 8; j++) {
1680 c = hr.u.cu.chars8.sz_data[j];
1681 psz_string[sub_string_index + j] = c;
1682 n++;
1683 if (n >= string_length) {
1684 psz_string[string_length - 1] = 0;
1685 hE = HPI_ERROR_INVALID_CONTROL_VALUE;
1686 break;
1687 }
1688 if (c == 0)
1689 break;
1690 }
1691
1692 if ((hr.u.cu.chars8.remaining_chars == 0)
1693 && ((sub_string_index + j) < string_length)
1694 && (c != 0)) {
1695 c = 0;
1696 psz_string[sub_string_index + j] = c;
1697 }
1698 if (c == 0)
1699 break;
1700 }
1701 return hE;
1702 }
1703
1704 u16 HPI_AESEBU__receiver_query_format(const struct hpi_hsubsys *ph_subsys,
1705 const u32 h_aes_rx, const u32 index, u16 *pw_format)
1706 {
1707 u32 qr;
1708 u16 err;
1709
1710 err = hpi_control_query(ph_subsys, h_aes_rx, HPI_AESEBURX_FORMAT,
1711 index, 0, &qr);
1712 *pw_format = (u16)qr;
1713 return err;
1714 }
1715
1716 u16 HPI_AESEBU__receiver_set_format(const struct hpi_hsubsys *ph_subsys,
1717 u32 h_control, u16 format)
1718 {
1719 return hpi_control_param_set(ph_subsys, h_control,
1720 HPI_AESEBURX_FORMAT, format, 0);
1721 }
1722
1723 u16 HPI_AESEBU__receiver_get_format(const struct hpi_hsubsys *ph_subsys,
1724 u32 h_control, u16 *pw_format)
1725 {
1726 u16 err;
1727 u32 param;
1728
1729 err = hpi_control_param1_get(ph_subsys, h_control,
1730 HPI_AESEBURX_FORMAT, &param);
1731 if (!err && pw_format)
1732 *pw_format = (u16)param;
1733
1734 return err;
1735 }
1736
1737 u16 HPI_AESEBU__receiver_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
1738 u32 h_control, u32 *psample_rate)
1739 {
1740 return hpi_control_param1_get(ph_subsys, h_control,
1741 HPI_AESEBURX_SAMPLERATE, psample_rate);
1742 }
1743
1744 u16 HPI_AESEBU__receiver_get_user_data(const struct hpi_hsubsys *ph_subsys,
1745 u32 h_control, u16 index, u16 *pw_data)
1746 {
1747 struct hpi_message hm;
1748 struct hpi_response hr;
1749 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1750 HPI_CONTROL_GET_STATE);
1751 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1752 hm.u.c.attribute = HPI_AESEBURX_USERDATA;
1753 hm.u.c.param1 = index;
1754
1755 hpi_send_recv(&hm, &hr);
1756
1757 if (pw_data)
1758 *pw_data = (u16)hr.u.c.param2;
1759 return hr.error;
1760 }
1761
1762 u16 HPI_AESEBU__receiver_get_channel_status(const struct hpi_hsubsys
1763 *ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1764 {
1765 struct hpi_message hm;
1766 struct hpi_response hr;
1767 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1768 HPI_CONTROL_GET_STATE);
1769 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1770 hm.u.c.attribute = HPI_AESEBURX_CHANNELSTATUS;
1771 hm.u.c.param1 = index;
1772
1773 hpi_send_recv(&hm, &hr);
1774
1775 if (pw_data)
1776 *pw_data = (u16)hr.u.c.param2;
1777 return hr.error;
1778 }
1779
1780 u16 HPI_AESEBU__receiver_get_error_status(const struct hpi_hsubsys *ph_subsys,
1781 u32 h_control, u16 *pw_error_data)
1782 {
1783 u32 error_data = 0;
1784 u16 error = 0;
1785
1786 error = hpi_control_param1_get(ph_subsys, h_control,
1787 HPI_AESEBURX_ERRORSTATUS, &error_data);
1788 if (pw_error_data)
1789 *pw_error_data = (u16)error_data;
1790 return error;
1791 }
1792
1793 u16 HPI_AESEBU__transmitter_set_sample_rate(const struct hpi_hsubsys
1794 *ph_subsys, u32 h_control, u32 sample_rate)
1795 {
1796 return hpi_control_param_set(ph_subsys, h_control,
1797 HPI_AESEBUTX_SAMPLERATE, sample_rate, 0);
1798 }
1799
1800 u16 HPI_AESEBU__transmitter_set_user_data(const struct hpi_hsubsys *ph_subsys,
1801 u32 h_control, u16 index, u16 data)
1802 {
1803 return hpi_control_param_set(ph_subsys, h_control,
1804 HPI_AESEBUTX_USERDATA, index, data);
1805 }
1806
1807 u16 HPI_AESEBU__transmitter_set_channel_status(const struct hpi_hsubsys
1808 *ph_subsys, u32 h_control, u16 index, u16 data)
1809 {
1810 return hpi_control_param_set(ph_subsys, h_control,
1811 HPI_AESEBUTX_CHANNELSTATUS, index, data);
1812 }
1813
1814 u16 HPI_AESEBU__transmitter_get_channel_status(const struct hpi_hsubsys
1815 *ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1816 {
1817 return HPI_ERROR_INVALID_OPERATION;
1818 }
1819
1820 u16 HPI_AESEBU__transmitter_query_format(const struct hpi_hsubsys *ph_subsys,
1821 const u32 h_aes_tx, const u32 index, u16 *pw_format)
1822 {
1823 u32 qr;
1824 u16 err;
1825
1826 err = hpi_control_query(ph_subsys, h_aes_tx, HPI_AESEBUTX_FORMAT,
1827 index, 0, &qr);
1828 *pw_format = (u16)qr;
1829 return err;
1830 }
1831
1832 u16 HPI_AESEBU__transmitter_set_format(const struct hpi_hsubsys *ph_subsys,
1833 u32 h_control, u16 output_format)
1834 {
1835 return hpi_control_param_set(ph_subsys, h_control,
1836 HPI_AESEBUTX_FORMAT, output_format, 0);
1837 }
1838
1839 u16 HPI_AESEBU__transmitter_get_format(const struct hpi_hsubsys *ph_subsys,
1840 u32 h_control, u16 *pw_output_format)
1841 {
1842 u16 err;
1843 u32 param;
1844
1845 err = hpi_control_param1_get(ph_subsys, h_control,
1846 HPI_AESEBUTX_FORMAT, &param);
1847 if (!err && pw_output_format)
1848 *pw_output_format = (u16)param;
1849
1850 return err;
1851 }
1852
1853 u16 hpi_bitstream_set_clock_edge(const struct hpi_hsubsys *ph_subsys,
1854 u32 h_control, u16 edge_type)
1855 {
1856 return hpi_control_param_set(ph_subsys, h_control,
1857 HPI_BITSTREAM_CLOCK_EDGE, edge_type, 0);
1858 }
1859
1860 u16 hpi_bitstream_set_data_polarity(const struct hpi_hsubsys *ph_subsys,
1861 u32 h_control, u16 polarity)
1862 {
1863 return hpi_control_param_set(ph_subsys, h_control,
1864 HPI_BITSTREAM_DATA_POLARITY, polarity, 0);
1865 }
1866
1867 u16 hpi_bitstream_get_activity(const struct hpi_hsubsys *ph_subsys,
1868 u32 h_control, u16 *pw_clk_activity, u16 *pw_data_activity)
1869 {
1870 struct hpi_message hm;
1871 struct hpi_response hr;
1872 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1873 HPI_CONTROL_GET_STATE);
1874 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1875 hm.u.c.attribute = HPI_BITSTREAM_ACTIVITY;
1876 hpi_send_recv(&hm, &hr);
1877 if (pw_clk_activity)
1878 *pw_clk_activity = (u16)hr.u.c.param1;
1879 if (pw_data_activity)
1880 *pw_data_activity = (u16)hr.u.c.param2;
1881 return hr.error;
1882 }
1883
1884 u16 hpi_channel_mode_query_mode(const struct hpi_hsubsys *ph_subsys,
1885 const u32 h_mode, const u32 index, u16 *pw_mode)
1886 {
1887 u32 qr;
1888 u16 err;
1889
1890 err = hpi_control_query(ph_subsys, h_mode, HPI_CHANNEL_MODE_MODE,
1891 index, 0, &qr);
1892 *pw_mode = (u16)qr;
1893 return err;
1894 }
1895
1896 u16 hpi_channel_mode_set(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1897 u16 mode)
1898 {
1899 return hpi_control_param_set(ph_subsys, h_control,
1900 HPI_CHANNEL_MODE_MODE, mode, 0);
1901 }
1902
1903 u16 hpi_channel_mode_get(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1904 u16 *mode)
1905 {
1906 u32 mode32 = 0;
1907 u16 error = hpi_control_param1_get(ph_subsys, h_control,
1908 HPI_CHANNEL_MODE_MODE, &mode32);
1909 if (mode)
1910 *mode = (u16)mode32;
1911 return error;
1912 }
1913
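/* CobraNet HMI access: transfers of up to 8 bytes travel inside the message
 * itself (HPI_COBRANET_SET/GET), larger transfers pass a pointer to the
 * caller's buffer (HPI_COBRANET_SET_DATA/GET_DATA).
 */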
1914 u16 hpi_cobranet_hmi_write(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1915 u32 hmi_address, u32 byte_count, u8 *pb_data)
1916 {
1917 struct hpi_message hm;
1918 struct hpi_response hr;
1919 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1920 HPI_CONTROL_SET_STATE);
1921 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1922
1923 hm.u.cx.u.cobranet_data.byte_count = byte_count;
1924 hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1925
1926 if (byte_count <= 8) {
1927 memcpy(hm.u.cx.u.cobranet_data.data, pb_data, byte_count);
1928 hm.u.cx.attribute = HPI_COBRANET_SET;
1929 } else {
1930 hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1931 hm.u.cx.attribute = HPI_COBRANET_SET_DATA;
1932 }
1933
1934 hpi_send_recv(&hm, &hr);
1935
1936 return hr.error;
1937 }
1938
1939 u16 hpi_cobranet_hmi_read(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1940 u32 hmi_address, u32 max_byte_count, u32 *pbyte_count, u8 *pb_data)
1941 {
1942 struct hpi_message hm;
1943 struct hpi_response hr;
1944 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1945 HPI_CONTROL_GET_STATE);
1946 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1947
1948 hm.u.cx.u.cobranet_data.byte_count = max_byte_count;
1949 hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1950
1951 if (max_byte_count <= 8) {
1952 hm.u.cx.attribute = HPI_COBRANET_GET;
1953 } else {
1954 hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1955 hm.u.cx.attribute = HPI_COBRANET_GET_DATA;
1956 }
1957
1958 hpi_send_recv(&hm, &hr);
1959 if (!hr.error && pb_data) {
1960
1961 *pbyte_count = hr.u.cx.u.cobranet_data.byte_count;
1962
1963 if (*pbyte_count < max_byte_count)
1964 max_byte_count = *pbyte_count;
1965
1966 if (hm.u.cx.attribute == HPI_COBRANET_GET) {
1967 memcpy(pb_data, hr.u.cx.u.cobranet_data.data,
1968 max_byte_count);
1969 } else {
1970
1971 }
1972
1973 }
1974 return hr.error;
1975 }
1976
1977 u16 hpi_cobranet_hmi_get_status(const struct hpi_hsubsys *ph_subsys,
1978 u32 h_control, u32 *pstatus, u32 *preadable_size,
1979 u32 *pwriteable_size)
1980 {
1981 struct hpi_message hm;
1982 struct hpi_response hr;
1983 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1984 HPI_CONTROL_GET_STATE);
1985 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1986
1987 hm.u.cx.attribute = HPI_COBRANET_GET_STATUS;
1988
1989 hpi_send_recv(&hm, &hr);
1990 if (!hr.error) {
1991 if (pstatus)
1992 *pstatus = hr.u.cx.u.cobranet_status.status;
1993 if (preadable_size)
1994 *preadable_size =
1995 hr.u.cx.u.cobranet_status.readable_size;
1996 if (pwriteable_size)
1997 *pwriteable_size =
1998 hr.u.cx.u.cobranet_status.writeable_size;
1999 }
2000 return hr.error;
2001 }
2002
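/*
 * The IP and MAC helpers below swap the two bytes within each 16-bit word
 * of the value exchanged with the HMI (0xAABBCCDD <-> 0xBBAADDCC),
 * presumably to convert between the HMI's storage order and host order.
 */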
2003 u16 hpi_cobranet_getI_paddress(const struct hpi_hsubsys *ph_subsys,
2004 u32 h_control, u32 *pi_paddress)
2005 {
2006 u32 byte_count;
2007 u32 iP;
2008 u16 error;
2009 error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2010 HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, &byte_count,
2011 (u8 *)&iP);
2012
2013 *pi_paddress =
2014 ((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2015 0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2016
2017 if (error)
2018 *pi_paddress = 0;
2019
2020 return error;
2021
2022 }
2023
2024 u16 hpi_cobranet_setI_paddress(const struct hpi_hsubsys *ph_subsys,
2025 u32 h_control, u32 i_paddress)
2026 {
2027 u32 iP;
2028 u16 error;
2029
2030 iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2031 8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2032 0x000000ff) << 8);
2033
2034 error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2035 HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, (u8 *)&iP);
2036
2037 return error;
2038
2039 }
2040
2041 u16 hpi_cobranet_get_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2042 u32 h_control, u32 *pi_paddress)
2043 {
2044 u32 byte_count;
2045 u32 iP;
2046 u16 error;
2047 error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2048 HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, &byte_count,
2049 (u8 *)&iP);
2050
2051 *pi_paddress =
2052 ((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2053 0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2054
2055 if (error)
2056 *pi_paddress = 0;
2057
2058 return error;
2059
2060 }
2061
2062 u16 hpi_cobranet_set_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2063 u32 h_control, u32 i_paddress)
2064 {
2065 u32 iP;
2066 u16 error;
2067
2068 iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2069 8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2070 0x000000ff) << 8);
2071
2072 error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2073 HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, (u8 *)&iP);
2074
2075 return error;
2076
2077 }
2078
2079 u16 hpi_cobranet_getMA_caddress(const struct hpi_hsubsys *ph_subsys,
2080 u32 h_control, u32 *pmAC_MS_bs, u32 *pmAC_LS_bs)
2081 {
2082 u32 byte_count;
2083 u16 error;
2084 u32 mAC;
2085 error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2086 HPI_COBRANET_HMI_cobra_if_phy_address, 4, &byte_count,
2087 (u8 *)&mAC);
2088 *pmAC_MS_bs =
2089 ((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2090 & 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2091 error += hpi_cobranet_hmi_read(ph_subsys, h_control,
2092 HPI_COBRANET_HMI_cobra_if_phy_address + 1, 4, &byte_count,
2093 (u8 *)&mAC);
2094 *pmAC_LS_bs =
2095 ((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2096 & 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2097
2098 if (error) {
2099 *pmAC_MS_bs = 0;
2100 *pmAC_LS_bs = 0;
2101 }
2102
2103 return error;
2104 }
2105
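/*
 * Compander parameter packing shared by set/get below: param1 holds the
 * attack time in its low 16 bits and ratio*100 in its high 16 bits, param2
 * holds the decay time, and an_log_value[0]/[1] carry the threshold and
 * makeup gain in 0.01 dB.
 */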
2106 u16 hpi_compander_set(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2107 u16 attack, u16 decay, short ratio100, short threshold0_01dB,
2108 short makeup_gain0_01dB)
2109 {
2110 struct hpi_message hm;
2111 struct hpi_response hr;
2112 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2113 HPI_CONTROL_SET_STATE);
2114 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2115
2116 hm.u.c.param1 = attack + ((u32)ratio100 << 16);
2117 hm.u.c.param2 = (decay & 0xFFFFL);
2118 hm.u.c.an_log_value[0] = threshold0_01dB;
2119 hm.u.c.an_log_value[1] = makeup_gain0_01dB;
2120 hm.u.c.attribute = HPI_COMPANDER_PARAMS;
2121
2122 hpi_send_recv(&hm, &hr);
2123
2124 return hr.error;
2125 }
2126
2127 u16 hpi_compander_get(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2128 u16 *pw_attack, u16 *pw_decay, short *pw_ratio100,
2129 short *pn_threshold0_01dB, short *pn_makeup_gain0_01dB)
2130 {
2131 struct hpi_message hm;
2132 struct hpi_response hr;
2133 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2134 HPI_CONTROL_GET_STATE);
2135 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2136 hm.u.c.attribute = HPI_COMPANDER_PARAMS;
2137
2138 hpi_send_recv(&hm, &hr);
2139
2140 if (pw_attack)
2141 *pw_attack = (u16)(hr.u.c.param1 & 0xFFFF);
2142 if (pw_decay)
2143 *pw_decay = (u16)(hr.u.c.param2 & 0xFFFF);
2144 if (pw_ratio100)
2145 *pw_ratio100 = (short)(hr.u.c.param1 >> 16);
2146
2147 if (pn_threshold0_01dB)
2148 *pn_threshold0_01dB = hr.u.c.an_log_value[0];
2149 if (pn_makeup_gain0_01dB)
2150 *pn_makeup_gain0_01dB = hr.u.c.an_log_value[1];
2151
2152 return hr.error;
2153 }
2154
2155 u16 hpi_level_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2156 short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
2157 {
2158 struct hpi_message hm;
2159 struct hpi_response hr;
2160 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2161 HPI_CONTROL_GET_STATE);
2162 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2163 hm.u.c.attribute = HPI_LEVEL_RANGE;
2164
2165 hpi_send_recv(&hm, &hr);
2166 if (hr.error) {
2167 hr.u.c.an_log_value[0] = 0;
2168 hr.u.c.an_log_value[1] = 0;
2169 hr.u.c.param1 = 0;
2170 }
2171 if (min_gain_01dB)
2172 *min_gain_01dB = hr.u.c.an_log_value[0];
2173 if (max_gain_01dB)
2174 *max_gain_01dB = hr.u.c.an_log_value[1];
2175 if (step_gain_01dB)
2176 *step_gain_01dB = (short)hr.u.c.param1;
2177 return hr.error;
2178 }
2179
2180 u16 hpi_level_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2181 short an_gain0_01dB[HPI_MAX_CHANNELS]
2182 )
2183 {
2184 struct hpi_message hm;
2185 struct hpi_response hr;
2186
2187 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2188 HPI_CONTROL_SET_STATE);
2189 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2190 memcpy(hm.u.c.an_log_value, an_gain0_01dB,
2191 sizeof(short) * HPI_MAX_CHANNELS);
2192 hm.u.c.attribute = HPI_LEVEL_GAIN;
2193
2194 hpi_send_recv(&hm, &hr);
2195
2196 return hr.error;
2197 }
2198
2199 u16 hpi_level_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2200 short an_gain0_01dB[HPI_MAX_CHANNELS]
2201 )
2202 {
2203 struct hpi_message hm;
2204 struct hpi_response hr;
2205 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2206 HPI_CONTROL_GET_STATE);
2207 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2208 hm.u.c.attribute = HPI_LEVEL_GAIN;
2209
2210 hpi_send_recv(&hm, &hr);
2211
2212 memcpy(an_gain0_01dB, hr.u.c.an_log_value,
2213 sizeof(short) * HPI_MAX_CHANNELS);
2214 return hr.error;
2215 }
2216
2217 u16 hpi_meter_query_channels(const struct hpi_hsubsys *ph_subsys,
2218 const u32 h_meter, u32 *p_channels)
2219 {
2220 return hpi_control_query(ph_subsys, h_meter, HPI_METER_NUM_CHANNELS,
2221 0, 0, p_channels);
2222 }
2223
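/*
 * Meter reads below return one value per channel in an_log_value[]
 * (presumably in the 0.01 dB units used throughout this file); on error
 * every channel is set to HPI_METER_MINIMUM so callers always get a fully
 * initialised array.
 */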
2224 u16 hpi_meter_get_peak(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2225 short an_peakdB[HPI_MAX_CHANNELS]
2226 )
2227 {
2228 short i = 0;
2229
2230 struct hpi_message hm;
2231 struct hpi_response hr;
2232
2233 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2234 HPI_CONTROL_GET_STATE);
2235 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2237 hm.u.c.attribute = HPI_METER_PEAK;
2238
2239 hpi_send_recv(&hm, &hr);
2240
2241 if (!hr.error)
2242 memcpy(an_peakdB, hr.u.c.an_log_value,
2243 sizeof(short) * HPI_MAX_CHANNELS);
2244 else
2245 for (i = 0; i < HPI_MAX_CHANNELS; i++)
2246 an_peakdB[i] = HPI_METER_MINIMUM;
2247 return hr.error;
2248 }
2249
2250 u16 hpi_meter_get_rms(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2251 short an_rmsdB[HPI_MAX_CHANNELS]
2252 )
2253 {
2254 short i = 0;
2255
2256 struct hpi_message hm;
2257 struct hpi_response hr;
2258
2259 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2260 HPI_CONTROL_GET_STATE);
2261 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2262 hm.u.c.attribute = HPI_METER_RMS;
2263
2264 hpi_send_recv(&hm, &hr);
2265
2266 if (!hr.error)
2267 memcpy(an_rmsdB, hr.u.c.an_log_value,
2268 sizeof(short) * HPI_MAX_CHANNELS);
2269 else
2270 for (i = 0; i < HPI_MAX_CHANNELS; i++)
2271 an_rmsdB[i] = HPI_METER_MINIMUM;
2272
2273 return hr.error;
2274 }
2275
2276 u16 hpi_meter_set_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2277 u32 h_control, u16 attack, u16 decay)
2278 {
2279 return hpi_control_param_set(ph_subsys, h_control,
2280 HPI_METER_RMS_BALLISTICS, attack, decay);
2281 }
2282
2283 u16 hpi_meter_get_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2284 u32 h_control, u16 *pn_attack, u16 *pn_decay)
2285 {
2286 u32 attack;
2287 u32 decay;
2288 u16 error;
2289
2290 error = hpi_control_param2_get(ph_subsys, h_control,
2291 HPI_METER_RMS_BALLISTICS, &attack, &decay);
2292
2293 if (pn_attack)
2294 *pn_attack = (unsigned short)attack;
2295 if (pn_decay)
2296 *pn_decay = (unsigned short)decay;
2297
2298 return error;
2299 }
2300
2301 u16 hpi_meter_set_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2302 u32 h_control, u16 attack, u16 decay)
2303 {
2304 return hpi_control_param_set(ph_subsys, h_control,
2305 HPI_METER_PEAK_BALLISTICS, attack, decay);
2306 }
2307
2308 u16 hpi_meter_get_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2309 u32 h_control, u16 *pn_attack, u16 *pn_decay)
2310 {
2311 u32 attack;
2312 u32 decay;
2313 u16 error;
2314
2315 error = hpi_control_param2_get(ph_subsys, h_control,
2316 HPI_METER_PEAK_BALLISTICS, &attack, &decay);
2317
2318 if (pn_attack)
2319 *pn_attack = (u16)attack;
2320 if (pn_decay)
2321 *pn_decay = (u16)decay;
2322
2323 return error;
2324 }
2325
2326 u16 hpi_microphone_set_phantom_power(const struct hpi_hsubsys *ph_subsys,
2327 u32 h_control, u16 on_off)
2328 {
2329 return hpi_control_param_set(ph_subsys, h_control,
2330 HPI_MICROPHONE_PHANTOM_POWER, (u32)on_off, 0);
2331 }
2332
2333 u16 hpi_microphone_get_phantom_power(const struct hpi_hsubsys *ph_subsys,
2334 u32 h_control, u16 *pw_on_off)
2335 {
2336 u16 error = 0;
2337 u32 on_off = 0;
2338 error = hpi_control_param1_get(ph_subsys, h_control,
2339 HPI_MICROPHONE_PHANTOM_POWER, &on_off);
2340 if (pw_on_off)
2341 *pw_on_off = (u16)on_off;
2342 return error;
2343 }
2344
2345 u16 hpi_multiplexer_set_source(const struct hpi_hsubsys *ph_subsys,
2346 u32 h_control, u16 source_node_type, u16 source_node_index)
2347 {
2348 return hpi_control_param_set(ph_subsys, h_control,
2349 HPI_MULTIPLEXER_SOURCE, source_node_type, source_node_index);
2350 }
2351
2352 u16 hpi_multiplexer_get_source(const struct hpi_hsubsys *ph_subsys,
2353 u32 h_control, u16 *source_node_type, u16 *source_node_index)
2354 {
2355 u32 node, index;
2356 u16 error = hpi_control_param2_get(ph_subsys, h_control,
2357 HPI_MULTIPLEXER_SOURCE, &node,
2358 &index);
2359 if (source_node_type)
2360 *source_node_type = (u16)node;
2361 if (source_node_index)
2362 *source_node_index = (u16)index;
2363 return error;
2364 }
2365
2366 u16 hpi_multiplexer_query_source(const struct hpi_hsubsys *ph_subsys,
2367 u32 h_control, u16 index, u16 *source_node_type,
2368 u16 *source_node_index)
2369 {
2370 struct hpi_message hm;
2371 struct hpi_response hr;
2372 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2373 HPI_CONTROL_GET_STATE);
2374 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2375 hm.u.c.attribute = HPI_MULTIPLEXER_QUERYSOURCE;
2376 hm.u.c.param1 = index;
2377
2378 hpi_send_recv(&hm, &hr);
2379
2380 if (source_node_type)
2381 *source_node_type = (u16)hr.u.c.param1;
2382 if (source_node_index)
2383 *source_node_index = (u16)hr.u.c.param2;
2384 return hr.error;
2385 }
2386
2387 u16 hpi_parametricEQ__get_info(const struct hpi_hsubsys *ph_subsys,
2388 u32 h_control, u16 *pw_number_of_bands, u16 *pw_on_off)
2389 {
2390 u32 oB = 0;
2391 u32 oO = 0;
2392 u16 error = 0;
2393
2394 error = hpi_control_param2_get(ph_subsys, h_control,
2395 HPI_EQUALIZER_NUM_FILTERS, &oO, &oB);
2396 if (pw_number_of_bands)
2397 *pw_number_of_bands = (u16)oB;
2398 if (pw_on_off)
2399 *pw_on_off = (u16)oO;
2400 return error;
2401 }
2402
2403 u16 hpi_parametricEQ__set_state(const struct hpi_hsubsys *ph_subsys,
2404 u32 h_control, u16 on_off)
2405 {
2406 return hpi_control_param_set(ph_subsys, h_control,
2407 HPI_EQUALIZER_NUM_FILTERS, on_off, 0);
2408 }
2409
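/*
 * Parametric EQ band encoding shared by get_band/set_band: param1 is the
 * band frequency in Hz, param2 packs the band index in its low 16 bits and
 * the filter type in its high 16 bits, an_log_value[0] is the gain in
 * 0.01 dB and an_log_value[1] is Q*100.
 */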
2410 u16 hpi_parametricEQ__get_band(const struct hpi_hsubsys *ph_subsys,
2411 u32 h_control, u16 index, u16 *pn_type, u32 *pfrequency_hz,
2412 short *pnQ100, short *pn_gain0_01dB)
2413 {
2414 struct hpi_message hm;
2415 struct hpi_response hr;
2416 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2417 HPI_CONTROL_GET_STATE);
2418 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2419 hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2420 hm.u.c.param2 = index;
2421
2422 hpi_send_recv(&hm, &hr);
2423
2424 if (pfrequency_hz)
2425 *pfrequency_hz = hr.u.c.param1;
2426 if (pn_type)
2427 *pn_type = (u16)(hr.u.c.param2 >> 16);
2428 if (pnQ100)
2429 *pnQ100 = hr.u.c.an_log_value[1];
2430 if (pn_gain0_01dB)
2431 *pn_gain0_01dB = hr.u.c.an_log_value[0];
2432
2433 return hr.error;
2434 }
2435
2436 u16 hpi_parametricEQ__set_band(const struct hpi_hsubsys *ph_subsys,
2437 u32 h_control, u16 index, u16 type, u32 frequency_hz, short q100,
2438 short gain0_01dB)
2439 {
2440 struct hpi_message hm;
2441 struct hpi_response hr;
2442 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2443 HPI_CONTROL_SET_STATE);
2444 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2445
2446 hm.u.c.param1 = frequency_hz;
2447 hm.u.c.param2 = (index & 0xFFFFL) + ((u32)type << 16);
2448 hm.u.c.an_log_value[0] = gain0_01dB;
2449 hm.u.c.an_log_value[1] = q100;
2450 hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2451
2452 hpi_send_recv(&hm, &hr);
2453
2454 return hr.error;
2455 }
2456
2457 u16 hpi_parametricEQ__get_coeffs(const struct hpi_hsubsys *ph_subsys,
2458 u32 h_control, u16 index, short coeffs[5]
2459 )
2460 {
2461 struct hpi_message hm;
2462 struct hpi_response hr;
2463 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2464 HPI_CONTROL_GET_STATE);
2465 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2466 hm.u.c.attribute = HPI_EQUALIZER_COEFFICIENTS;
2467 hm.u.c.param2 = index;
2468
2469 hpi_send_recv(&hm, &hr);
2470
2471 coeffs[0] = (short)hr.u.c.an_log_value[0];
2472 coeffs[1] = (short)hr.u.c.an_log_value[1];
2473 coeffs[2] = (short)hr.u.c.param1;
2474 coeffs[3] = (short)(hr.u.c.param1 >> 16);
2475 coeffs[4] = (short)hr.u.c.param2;
2476
2477 return hr.error;
2478 }
2479
2480 u16 hpi_sample_clock_query_source(const struct hpi_hsubsys *ph_subsys,
2481 const u32 h_clock, const u32 index, u16 *pw_source)
2482 {
2483 u32 qr;
2484 u16 err;
2485
2486 err = hpi_control_query(ph_subsys, h_clock, HPI_SAMPLECLOCK_SOURCE,
2487 index, 0, &qr);
2488 *pw_source = (u16)qr;
2489 return err;
2490 }
2491
2492 u16 hpi_sample_clock_set_source(const struct hpi_hsubsys *ph_subsys,
2493 u32 h_control, u16 source)
2494 {
2495 return hpi_control_param_set(ph_subsys, h_control,
2496 HPI_SAMPLECLOCK_SOURCE, source, 0);
2497 }
2498
2499 u16 hpi_sample_clock_get_source(const struct hpi_hsubsys *ph_subsys,
2500 u32 h_control, u16 *pw_source)
2501 {
2502 u16 error = 0;
2503 u32 source = 0;
2504 error = hpi_control_param1_get(ph_subsys, h_control,
2505 HPI_SAMPLECLOCK_SOURCE, &source);
2506 if (!error)
2507 if (pw_source)
2508 *pw_source = (u16)source;
2509 return error;
2510 }
2511
2512 u16 hpi_sample_clock_query_source_index(const struct hpi_hsubsys *ph_subsys,
2513 const u32 h_clock, const u32 index, const u32 source,
2514 u16 *pw_source_index)
2515 {
2516 u32 qr;
2517 u16 err;
2518
2519 err = hpi_control_query(ph_subsys, h_clock,
2520 HPI_SAMPLECLOCK_SOURCE_INDEX, index, source, &qr);
2521 *pw_source_index = (u16)qr;
2522 return err;
2523 }
2524
2525 u16 hpi_sample_clock_set_source_index(const struct hpi_hsubsys *ph_subsys,
2526 u32 h_control, u16 source_index)
2527 {
2528 return hpi_control_param_set(ph_subsys, h_control,
2529 HPI_SAMPLECLOCK_SOURCE_INDEX, source_index, 0);
2530 }
2531
2532 u16 hpi_sample_clock_get_source_index(const struct hpi_hsubsys *ph_subsys,
2533 u32 h_control, u16 *pw_source_index)
2534 {
2535 u16 error = 0;
2536 u32 source_index = 0;
2537 error = hpi_control_param1_get(ph_subsys, h_control,
2538 HPI_SAMPLECLOCK_SOURCE_INDEX, &source_index);
2539 if (!error)
2540 if (pw_source_index)
2541 *pw_source_index = (u16)source_index;
2542 return error;
2543 }
2544
2545 u16 hpi_sample_clock_query_local_rate(const struct hpi_hsubsys *ph_subsys,
2546 const u32 h_clock, const u32 index, u32 *prate)
2547 {
2548 u16 err;
2549 err = hpi_control_query(ph_subsys, h_clock,
2550 HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, index, 0, prate);
2551
2552 return err;
2553 }
2554
2555 u16 hpi_sample_clock_set_local_rate(const struct hpi_hsubsys *ph_subsys,
2556 u32 h_control, u32 sample_rate)
2557 {
2558 return hpi_control_param_set(ph_subsys, h_control,
2559 HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, sample_rate, 0);
2560 }
2561
2562 u16 hpi_sample_clock_get_local_rate(const struct hpi_hsubsys *ph_subsys,
2563 u32 h_control, u32 *psample_rate)
2564 {
2565 u16 error = 0;
2566 u32 sample_rate = 0;
2567 error = hpi_control_param1_get(ph_subsys, h_control,
2568 HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, &sample_rate);
2569 if (!error)
2570 if (psample_rate)
2571 *psample_rate = sample_rate;
2572 return error;
2573 }
2574
2575 u16 hpi_sample_clock_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
2576 u32 h_control, u32 *psample_rate)
2577 {
2578 u16 error = 0;
2579 u32 sample_rate = 0;
2580 error = hpi_control_param1_get(ph_subsys, h_control,
2581 HPI_SAMPLECLOCK_SAMPLERATE, &sample_rate);
2582 if (!error)
2583 if (psample_rate)
2584 *psample_rate = sample_rate;
2585 return error;
2586 }
2587
2588 u16 hpi_sample_clock_set_auto(const struct hpi_hsubsys *ph_subsys,
2589 u32 h_control, u32 enable)
2590 {
2591 return hpi_control_param_set(ph_subsys, h_control,
2592 HPI_SAMPLECLOCK_AUTO, enable, 0);
2593 }
2594
2595 u16 hpi_sample_clock_get_auto(const struct hpi_hsubsys *ph_subsys,
2596 u32 h_control, u32 *penable)
2597 {
2598 return hpi_control_param1_get(ph_subsys, h_control,
2599 HPI_SAMPLECLOCK_AUTO, penable);
2600 }
2601
2602 u16 hpi_sample_clock_set_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2603 u32 h_control, u32 lock)
2604 {
2605 return hpi_control_param_set(ph_subsys, h_control,
2606 HPI_SAMPLECLOCK_LOCAL_LOCK, lock, 0);
2607 }
2608
2609 u16 hpi_sample_clock_get_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2610 u32 h_control, u32 *plock)
2611 {
2612 return hpi_control_param1_get(ph_subsys, h_control,
2613 HPI_SAMPLECLOCK_LOCAL_LOCK, plock);
2614 }
2615
2616 u16 hpi_tone_detector_get_frequency(const struct hpi_hsubsys *ph_subsys,
2617 u32 h_control, u32 index, u32 *frequency)
2618 {
2619 return hpi_control_param_get(ph_subsys, h_control,
2620 HPI_TONEDETECTOR_FREQUENCY, index, 0, frequency, NULL);
2621 }
2622
2623 u16 hpi_tone_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2624 u32 h_control, u32 *state)
2625 {
2626 return hpi_control_param_get(ph_subsys, h_control,
2627 HPI_TONEDETECTOR_STATE, 0, 0, (u32 *)state, NULL);
2628 }
2629
2630 u16 hpi_tone_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2631 u32 h_control, u32 enable)
2632 {
2633 return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2634 (u32)enable, 0);
2635 }
2636
2637 u16 hpi_tone_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2638 u32 h_control, u32 *enable)
2639 {
2640 return hpi_control_param_get(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2641 0, 0, (u32 *)enable, NULL);
2642 }
2643
2644 u16 hpi_tone_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2645 u32 h_control, u32 event_enable)
2646 {
2647 return hpi_control_param_set(ph_subsys, h_control,
2648 HPI_GENERIC_EVENT_ENABLE, (u32)event_enable, 0);
2649 }
2650
2651 u16 hpi_tone_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2652 u32 h_control, u32 *event_enable)
2653 {
2654 return hpi_control_param_get(ph_subsys, h_control,
2655 HPI_GENERIC_EVENT_ENABLE, 0, 0, (u32 *)event_enable, NULL);
2656 }
2657
2658 u16 hpi_tone_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2659 u32 h_control, int threshold)
2660 {
2661 return hpi_control_param_set(ph_subsys, h_control,
2662 HPI_TONEDETECTOR_THRESHOLD, (u32)threshold, 0);
2663 }
2664
2665 u16 hpi_tone_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2666 u32 h_control, int *threshold)
2667 {
2668 return hpi_control_param_get(ph_subsys, h_control,
2669 HPI_TONEDETECTOR_THRESHOLD, 0, 0, (u32 *)threshold, NULL);
2670 }
2671
2672 u16 hpi_silence_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2673 u32 h_control, u32 *state)
2674 {
2675 return hpi_control_param_get(ph_subsys, h_control,
2676 HPI_SILENCEDETECTOR_STATE, 0, 0, (u32 *)state, NULL);
2677 }
2678
2679 u16 hpi_silence_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2680 u32 h_control, u32 enable)
2681 {
2682 return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2683 (u32)enable, 0);
2684 }
2685
2686 u16 hpi_silence_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2687 u32 h_control, u32 *enable)
2688 {
2689 return hpi_control_param_get(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2690 0, 0, (u32 *)enable, NULL);
2691 }
2692
2693 u16 hpi_silence_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2694 u32 h_control, u32 event_enable)
2695 {
2696 return hpi_control_param_set(ph_subsys, h_control,
2697 HPI_GENERIC_EVENT_ENABLE, (u32)event_enable, 0);
2698 }
2699
2700 u16 hpi_silence_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2701 u32 h_control, u32 *event_enable)
2702 {
2703 return hpi_control_param_get(ph_subsys, h_control,
2704 HPI_GENERIC_EVENT_ENABLE, 0, 0, (u32 *)event_enable, NULL);
2705 }
2706
2707 u16 hpi_silence_detector_set_delay(const struct hpi_hsubsys *ph_subsys,
2708 u32 h_control, u32 delay)
2709 {
2710 return hpi_control_param_set(ph_subsys, h_control,
2711 HPI_SILENCEDETECTOR_DELAY, (u32)delay, 0);
2712 }
2713
2714 u16 hpi_silence_detector_get_delay(const struct hpi_hsubsys *ph_subsys,
2715 u32 h_control, u32 *delay)
2716 {
2717 return hpi_control_param_get(ph_subsys, h_control,
2718 HPI_SILENCEDETECTOR_DELAY, 0, 0, (u32 *)delay, NULL);
2719 }
2720
2721 u16 hpi_silence_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2722 u32 h_control, int threshold)
2723 {
2724 return hpi_control_param_set(ph_subsys, h_control,
2725 HPI_SILENCEDETECTOR_THRESHOLD, (u32)threshold, 0);
2726 }
2727
2728 u16 hpi_silence_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2729 u32 h_control, int *threshold)
2730 {
2731 return hpi_control_param_get(ph_subsys, h_control,
2732 HPI_SILENCEDETECTOR_THRESHOLD, 0, 0, (u32 *)threshold, NULL);
2733 }
2734
2735 u16 hpi_tuner_query_band(const struct hpi_hsubsys *ph_subsys,
2736 const u32 h_tuner, const u32 index, u16 *pw_band)
2737 {
2738 u32 qr;
2739 u16 err;
2740
2741 err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_BAND, index, 0,
2742 &qr);
2743 *pw_band = (u16)qr;
2744 return err;
2745 }
2746
2747 u16 hpi_tuner_set_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2748 u16 band)
2749 {
2750 return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_BAND,
2751 band, 0);
2752 }
2753
2754 u16 hpi_tuner_get_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2755 u16 *pw_band)
2756 {
2757 u32 band = 0;
2758 u16 error = 0;
2759
2760 error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_BAND,
2761 &band);
2762 if (pw_band)
2763 *pw_band = (u16)band;
2764 return error;
2765 }
2766
2767 u16 hpi_tuner_query_frequency(const struct hpi_hsubsys *ph_subsys,
2768 const u32 h_tuner, const u32 index, const u16 band, u32 *pfreq)
2769 {
2770 return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_FREQ, index,
2771 band, pfreq);
2772 }
2773
2774 u16 hpi_tuner_set_frequency(const struct hpi_hsubsys *ph_subsys,
2775 u32 h_control, u32 freq_ink_hz)
2776 {
2777 return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_FREQ,
2778 freq_ink_hz, 0);
2779 }
2780
2781 u16 hpi_tuner_get_frequency(const struct hpi_hsubsys *ph_subsys,
2782 u32 h_control, u32 *pw_freq_ink_hz)
2783 {
2784 return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_FREQ,
2785 pw_freq_ink_hz);
2786 }
2787
2788 u16 hpi_tuner_query_gain(const struct hpi_hsubsys *ph_subsys,
2789 const u32 h_tuner, const u32 index, u16 *pw_gain)
2790 {
2791 u32 qr;
2792 u16 err;
2793
2794 err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_GAIN, index, 0,
2795 &qr);
2796 *pw_gain = (u16)qr;
2797 return err;
2798 }
2799
2800 u16 hpi_tuner_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2801 short gain)
2802 {
2803 return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_GAIN,
2804 gain, 0);
2805 }
2806
2807 u16 hpi_tuner_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2808 short *pn_gain)
2809 {
2810 u32 gain = 0;
2811 u16 error = 0;
2812
2813 error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_GAIN,
2814 &gain);
2815 if (pn_gain)
2816 *pn_gain = (short)gain;
2817 return error;
2818 }
2819
2820 u16 hpi_tuner_getRF_level(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2821 short *pw_level)
2822 {
2823 struct hpi_message hm;
2824 struct hpi_response hr;
2825 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2826 HPI_CONTROL_GET_STATE);
2827 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2828 hm.u.c.attribute = HPI_TUNER_LEVEL;
2829 hm.u.c.param1 = HPI_TUNER_LEVEL_AVERAGE;
2830 hpi_send_recv(&hm, &hr);
2831 if (pw_level)
2832 *pw_level = (short)hr.u.c.param1;
2833 return hr.error;
2834 }
2835
2836 u16 hpi_tuner_get_rawRF_level(const struct hpi_hsubsys *ph_subsys,
2837 u32 h_control, short *pw_level)
2838 {
2839 struct hpi_message hm;
2840 struct hpi_response hr;
2841 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2842 HPI_CONTROL_GET_STATE);
2843 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2844 hm.u.c.attribute = HPI_TUNER_LEVEL;
2845 hm.u.c.param1 = HPI_TUNER_LEVEL_RAW;
2846 hpi_send_recv(&hm, &hr);
2847 if (pw_level)
2848 *pw_level = (short)hr.u.c.param1;
2849 return hr.error;
2850 }
2851
2852 u16 hpi_tuner_query_deemphasis(const struct hpi_hsubsys *ph_subsys,
2853 const u32 h_tuner, const u32 index, const u16 band, u32 *pdeemphasis)
2854 {
2855 return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_DEEMPHASIS,
2856 index, band, pdeemphasis);
2857 }
2858
2859 u16 hpi_tuner_set_deemphasis(const struct hpi_hsubsys *ph_subsys,
2860 u32 h_control, u32 deemphasis)
2861 {
2862 return hpi_control_param_set(ph_subsys, h_control,
2863 HPI_TUNER_DEEMPHASIS, deemphasis, 0);
2864 }
2865
2866 u16 hpi_tuner_get_deemphasis(const struct hpi_hsubsys *ph_subsys,
2867 u32 h_control, u32 *pdeemphasis)
2868 {
2869 return hpi_control_param1_get(ph_subsys, h_control,
2870 HPI_TUNER_DEEMPHASIS, pdeemphasis);
2871 }
2872
2873 u16 hpi_tuner_query_program(const struct hpi_hsubsys *ph_subsys,
2874 const u32 h_tuner, u32 *pbitmap_program)
2875 {
2876 return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_PROGRAM, 0, 0,
2877 pbitmap_program);
2878 }
2879
2880 u16 hpi_tuner_set_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2881 u32 program)
2882 {
2883 return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2884 program, 0);
2885 }
2886
2887 u16 hpi_tuner_get_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2888 u32 *pprogram)
2889 {
2890 return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2891 pprogram);
2892 }
2893
2894 u16 hpi_tuner_get_hd_radio_dsp_version(const struct hpi_hsubsys *ph_subsys,
2895 u32 h_control, char *psz_dsp_version, const u32 string_size)
2896 {
2897 return hpi_control_get_string(ph_subsys, h_control,
2898 HPI_TUNER_HDRADIO_DSP_VERSION, psz_dsp_version, string_size);
2899 }
2900
2901 u16 hpi_tuner_get_hd_radio_sdk_version(const struct hpi_hsubsys *ph_subsys,
2902 u32 h_control, char *psz_sdk_version, const u32 string_size)
2903 {
2904 return hpi_control_get_string(ph_subsys, h_control,
2905 HPI_TUNER_HDRADIO_SDK_VERSION, psz_sdk_version, string_size);
2906 }
2907
2908 u16 hpi_tuner_get_status(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2909 u16 *pw_status_mask, u16 *pw_status)
2910 {
2911 u32 status = 0;
2912 u16 error = 0;
2913
2914 error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_STATUS,
2915 &status);
2916 if (pw_status) {
2917 if (!error) {
2918 *pw_status_mask = (u16)(status >> 16);
2919 *pw_status = (u16)(status & 0xFFFF);
2920 } else {
2921 *pw_status_mask = 0;
2922 *pw_status = 0;
2923 }
2924 }
2925 return error;
2926 }
2927
2928 u16 hpi_tuner_set_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2929 u32 mode, u32 value)
2930 {
2931 return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_MODE,
2932 mode, value);
2933 }
2934
2935 u16 hpi_tuner_get_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2936 u32 mode, u32 *pn_value)
2937 {
2938 return hpi_control_param_get(ph_subsys, h_control, HPI_TUNER_MODE,
2939 mode, 0, pn_value, NULL);
2940 }
2941
2942 u16 hpi_tuner_get_hd_radio_signal_quality(const struct hpi_hsubsys *ph_subsys,
2943 u32 h_control, u32 *pquality)
2944 {
2945 return hpi_control_param_get(ph_subsys, h_control,
2946 HPI_TUNER_HDRADIO_SIGNAL_QUALITY, 0, 0, pquality, NULL);
2947 }
2948
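/*
 * RDS read: the caller's buffer receives 12 bytes - two 32-bit RDS data
 * words followed by the 32-bit block error rate (bLER) counter - so p_data
 * must point to at least 12 bytes of storage.
 */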
2949 u16 hpi_tuner_getRDS(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2950 char *p_data)
2951 {
2952 struct hpi_message hm;
2953 struct hpi_response hr;
2954 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2955 HPI_CONTROL_GET_STATE);
2956 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2957 hm.u.c.attribute = HPI_TUNER_RDS;
2958 hpi_send_recv(&hm, &hr);
2959 if (p_data) {
2960 *(u32 *)&p_data[0] = hr.u.cu.tuner.rds.data[0];
2961 *(u32 *)&p_data[4] = hr.u.cu.tuner.rds.data[1];
2962 *(u32 *)&p_data[8] = hr.u.cu.tuner.rds.bLER;
2963 }
2964 return hr.error;
2965 }
2966
2967 u16 HPI_PAD__get_channel_name(const struct hpi_hsubsys *ph_subsys,
2968 u32 h_control, char *psz_string, const u32 data_length)
2969 {
2970 return hpi_control_get_string(ph_subsys, h_control,
2971 HPI_PAD_CHANNEL_NAME, psz_string, data_length);
2972 }
2973
2974 u16 HPI_PAD__get_artist(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2975 char *psz_string, const u32 data_length)
2976 {
2977 return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_ARTIST,
2978 psz_string, data_length);
2979 }
2980
2981 u16 HPI_PAD__get_title(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2982 char *psz_string, const u32 data_length)
2983 {
2984 return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_TITLE,
2985 psz_string, data_length);
2986 }
2987
2988 u16 HPI_PAD__get_comment(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2989 char *psz_string, const u32 data_length)
2990 {
2991 return hpi_control_get_string(ph_subsys, h_control, HPI_PAD_COMMENT,
2992 psz_string, data_length);
2993 }
2994
2995 u16 HPI_PAD__get_program_type(const struct hpi_hsubsys *ph_subsys,
2996 u32 h_control, u32 *ppTY)
2997 {
2998 return hpi_control_param_get(ph_subsys, h_control,
2999 HPI_PAD_PROGRAM_TYPE, 0, 0, ppTY, NULL);
3000 }
3001
3002 u16 HPI_PAD__get_rdsPI(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3003 u32 *ppI)
3004 {
3005 return hpi_control_param_get(ph_subsys, h_control, HPI_PAD_PROGRAM_ID,
3006 0, 0, ppI, NULL);
3007 }
3008
3009 u16 hpi_volume_query_channels(const struct hpi_hsubsys *ph_subsys,
3010 const u32 h_volume, u32 *p_channels)
3011 {
3012 return hpi_control_query(ph_subsys, h_volume, HPI_VOLUME_NUM_CHANNELS,
3013 0, 0, p_channels);
3014 }
3015
3016 u16 hpi_volume_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3017 short an_log_gain[HPI_MAX_CHANNELS]
3018 )
3019 {
3020 struct hpi_message hm;
3021 struct hpi_response hr;
3022 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3023 HPI_CONTROL_SET_STATE);
3024 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3025 memcpy(hm.u.c.an_log_value, an_log_gain,
3026 sizeof(short) * HPI_MAX_CHANNELS);
3027 hm.u.c.attribute = HPI_VOLUME_GAIN;
3028
3029 hpi_send_recv(&hm, &hr);
3030
3031 return hr.error;
3032 }
3033
3034 u16 hpi_volume_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3035 short an_log_gain[HPI_MAX_CHANNELS]
3036 )
3037 {
3038 struct hpi_message hm;
3039 struct hpi_response hr;
3040 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3041 HPI_CONTROL_GET_STATE);
3042 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3043 hm.u.c.attribute = HPI_VOLUME_GAIN;
3044
3045 hpi_send_recv(&hm, &hr);
3046
3047 memcpy(an_log_gain, hr.u.c.an_log_value,
3048 sizeof(short) * HPI_MAX_CHANNELS);
3049 return hr.error;
3050 }
3051
3052 u16 hpi_volume_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3053 short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
3054 {
3055 struct hpi_message hm;
3056 struct hpi_response hr;
3057 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3058 HPI_CONTROL_GET_STATE);
3059 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3060 hm.u.c.attribute = HPI_VOLUME_RANGE;
3061
3062 hpi_send_recv(&hm, &hr);
3063 if (hr.error) {
3064 hr.u.c.an_log_value[0] = 0;
3065 hr.u.c.an_log_value[1] = 0;
3066 hr.u.c.param1 = 0;
3067 }
3068 if (min_gain_01dB)
3069 *min_gain_01dB = hr.u.c.an_log_value[0];
3070 if (max_gain_01dB)
3071 *max_gain_01dB = hr.u.c.an_log_value[1];
3072 if (step_gain_01dB)
3073 *step_gain_01dB = (short)hr.u.c.param1;
3074 return hr.error;
3075 }
3076
3077 u16 hpi_volume_auto_fade_profile(const struct hpi_hsubsys *ph_subsys,
3078 u32 h_control, short an_stop_gain0_01dB[HPI_MAX_CHANNELS],
3079 u32 duration_ms, u16 profile)
3080 {
3081 struct hpi_message hm;
3082 struct hpi_response hr;
3083 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3084 HPI_CONTROL_SET_STATE);
3085 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3086
3087 memcpy(hm.u.c.an_log_value, an_stop_gain0_01dB,
3088 sizeof(short) * HPI_MAX_CHANNELS);
3089
3090 hm.u.c.attribute = HPI_VOLUME_AUTOFADE;
3091 hm.u.c.param1 = duration_ms;
3092 hm.u.c.param2 = profile;
3093
3094 hpi_send_recv(&hm, &hr);
3095
3096 return hr.error;
3097 }
3098
3099 u16 hpi_volume_auto_fade(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3100 short an_stop_gain0_01dB[HPI_MAX_CHANNELS], u32 duration_ms)
3101 {
3102 return hpi_volume_auto_fade_profile(ph_subsys, h_control,
3103 an_stop_gain0_01dB, duration_ms, HPI_VOLUME_AUTOFADE_LOG);
3104 }
3105
3106 u16 hpi_vox_set_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3107 short an_gain0_01dB)
3108 {
3109 struct hpi_message hm;
3110 struct hpi_response hr;
3111 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3112 HPI_CONTROL_SET_STATE);
3113 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3114 hm.u.c.attribute = HPI_VOX_THRESHOLD;
3115
3116 hm.u.c.an_log_value[0] = an_gain0_01dB;
3117
3118 hpi_send_recv(&hm, &hr);
3119
3120 return hr.error;
3121 }
3122
3123 u16 hpi_vox_get_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3124 short *an_gain0_01dB)
3125 {
3126 struct hpi_message hm;
3127 struct hpi_response hr;
3128 hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3129 HPI_CONTROL_GET_STATE);
3130 u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3131 hm.u.c.attribute = HPI_VOX_THRESHOLD;
3132
3133 hpi_send_recv(&hm, &hr);
3134
3135 *an_gain0_01dB = hr.u.c.an_log_value[0];
3136
3137 return hr.error;
3138 }
3139
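/*
 * Entity (STRV) support: an hpi_entity is a variable-length packet holding
 * a header (size, type, role) followed by packed values; per-type element
 * sizes are tabulated in entity_type_to_size[] below, and a sequence-type
 * entity nests further entities in its value area.
 */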
3140 static size_t strv_packet_size = MIN_STRV_PACKET_SIZE;
3141
3142 static size_t entity_type_to_size[LAST_ENTITY_TYPE] = {
3143 0,
3144 sizeof(struct hpi_entity),
3145 sizeof(void *),
3146
3147 sizeof(int),
3148 sizeof(float),
3149 sizeof(double),
3150
3151 sizeof(char),
3152 sizeof(char),
3153
3154 4 * sizeof(char),
3155 16 * sizeof(char),
3156 6 * sizeof(char),
3157 };
3158
3159 inline size_t hpi_entity_size(struct hpi_entity *entity_ptr)
3160 {
3161 return entity_ptr->header.size;
3162 }
3163
3164 inline size_t hpi_entity_header_size(struct hpi_entity *entity_ptr)
3165 {
3166 return sizeof(entity_ptr->header);
3167 }
3168
3169 inline size_t hpi_entity_value_size(struct hpi_entity *entity_ptr)
3170 {
3171 return hpi_entity_size(entity_ptr) -
3172 hpi_entity_header_size(entity_ptr);
3173 }
3174
3175 inline size_t hpi_entity_item_count(struct hpi_entity *entity_ptr)
3176 {
3177 return hpi_entity_value_size(entity_ptr) /
3178 entity_type_to_size[entity_ptr->header.type];
3179 }
3180
3181 inline struct hpi_entity *hpi_entity_ptr_to_next(struct hpi_entity
3182 *entity_ptr)
3183 {
3184 return (void *)(((uint8_t *) entity_ptr) +
3185 hpi_entity_size(entity_ptr));
3186 }
3187
3188 inline u16 hpi_entity_check_type(const enum e_entity_type t)
3189 {
3190 if (t >= 0 && t < STR_TYPE_FIELD_MAX)
3191 return 0;
3192 return HPI_ERROR_ENTITY_TYPE_INVALID;
3193 }
3194
3195 inline u16 hpi_entity_check_role(const enum e_entity_role r)
3196 {
3197 if (r >= 0 && r < STR_ROLE_FIELD_MAX)
3198 return 0;
3199 return HPI_ERROR_ENTITY_ROLE_INVALID;
3200 }
3201
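/*
 * Traversal helpers: guard_p points just past the containing entity, and
 * iteration stops once the next candidate would fall on or beyond it. With
 * recursive_flag set, a sequence entity is entered by stepping into its
 * value area rather than being skipped over.
 */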
3202 static u16 hpi_entity_get_next(struct hpi_entity *entity, int recursive_flag,
3203 void *guard_p, struct hpi_entity **next)
3204 {
3205 HPI_DEBUG_ASSERT(entity != NULL);
3206 HPI_DEBUG_ASSERT(next != NULL);
3207 HPI_DEBUG_ASSERT(hpi_entity_size(entity) != 0);
3208
3209 if (guard_p <= (void *)entity) {
3210 *next = NULL;
3211 return 0;
3212 }
3213
3214 if (recursive_flag && entity->header.type == entity_type_sequence)
3215 *next = (struct hpi_entity *)entity->value;
3216 else
3217 *next = (struct hpi_entity *)hpi_entity_ptr_to_next(entity);
3218
3219 if (guard_p <= (void *)*next) {
3220 *next = NULL;
3221 return 0;
3222 }
3223
3224 HPI_DEBUG_ASSERT(guard_p >= (void *)hpi_entity_ptr_to_next(*next));
3225 return 0;
3226 }
3227
3228 u16 hpi_entity_find_next(struct hpi_entity *container_entity,
3229 enum e_entity_type type, enum e_entity_role role, int recursive_flag,
3230 struct hpi_entity **current_match)
3231 {
3232 struct hpi_entity *tmp = NULL;
3233 void *guard_p = NULL;
3234
3235 HPI_DEBUG_ASSERT(container_entity != NULL);
3236 guard_p = hpi_entity_ptr_to_next(container_entity);
3237
3238 if (*current_match != NULL)
3239 hpi_entity_get_next(*current_match, recursive_flag, guard_p,
3240 &tmp);
3241 else
3242 hpi_entity_get_next(container_entity, 1, guard_p, &tmp);
3243
3244 while (tmp) {
3245 u16 err;
3246
3247 HPI_DEBUG_ASSERT((void *)tmp >= (void *)container_entity);
3248
3249 if ((!type || tmp->header.type == type) && (!role
3250 || tmp->header.role == role)) {
3251 *current_match = tmp;
3252 return 0;
3253 }
3254
3255 err = hpi_entity_get_next(tmp, recursive_flag, guard_p,
3256 current_match);
3257 if (err)
3258 return err;
3259
3260 tmp = *current_match;
3261 }
3262
3263 *current_match = NULL;
3264 return 0;
3265 }
3266
3267 void hpi_entity_free(struct hpi_entity *entity)
3268 {
3269 kfree(entity);
3271 }
3272
3273 static u16 hpi_entity_alloc_and_copy(struct hpi_entity *src,
3274 struct hpi_entity **dst)
3275 {
3276 size_t buf_size;
3277 HPI_DEBUG_ASSERT(dst != NULL);
3278 HPI_DEBUG_ASSERT(src != NULL);
3279
3280 buf_size = hpi_entity_size(src);
3281 *dst = kmalloc(buf_size, GFP_KERNEL);
3282 if (*dst == NULL)
3283 return HPI_ERROR_MEMORY_ALLOC;
3284 memcpy(*dst, src, buf_size);
3285 return 0;
3286 }
3287
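/*
 * hpi_universal_info() and hpi_universal_get() negotiate the response
 * buffer size: if the adapter reports HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL,
 * the required size is taken from specific_error and the request is
 * retried once with a larger buffer.
 */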
3288 u16 hpi_universal_info(const struct hpi_hsubsys *ph_subsys, u32 hC,
3289 struct hpi_entity **info)
3290 {
3291 struct hpi_msg_strv hm;
3292 struct hpi_res_strv *phr;
3293 u16 hpi_err;
3294 int remaining_attempts = 2;
3295 size_t resp_packet_size = 1024;
3296
3297 *info = NULL;
3298
3299 while (remaining_attempts--) {
3300 phr = kmalloc(resp_packet_size, GFP_KERNEL);
3301 if (!phr) return HPI_ERROR_MEMORY_ALLOC;
3302
3303 hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3304 (u16)resp_packet_size, HPI_OBJ_CONTROL,
3305 HPI_CONTROL_GET_INFO);
3306 u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3307
3308 hm.strv.header.size = sizeof(hm.strv);
3309 phr->strv.header.size = resp_packet_size - sizeof(phr->h);
3310
3311 hpi_send_recv((struct hpi_message *)&hm.h,
3312 (struct hpi_response *)&phr->h);
3313 if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3314
3315 HPI_DEBUG_ASSERT(phr->h.specific_error >
3316 MIN_STRV_PACKET_SIZE
3317 && phr->h.specific_error < 1500);
3318 resp_packet_size = phr->h.specific_error;
3319 } else {
3320 remaining_attempts = 0;
3321 if (!phr->h.error)
3322 hpi_entity_alloc_and_copy(&phr->strv, info);
3323 }
3324
3325 hpi_err = phr->h.error;
3326 kfree(phr);
3327 }
3328
3329 return hpi_err;
3330 }
3331
3332 u16 hpi_universal_get(const struct hpi_hsubsys *ph_subsys, u32 hC,
3333 struct hpi_entity **value)
3334 {
3335 struct hpi_msg_strv hm;
3336 struct hpi_res_strv *phr;
3337 u16 hpi_err;
3338 int remaining_attempts = 2;
3339
3340 *value = NULL;
3341
3342 while (remaining_attempts--) {
3343 phr = kmalloc(strv_packet_size, GFP_KERNEL);
3344 if (!phr)
3345 return HPI_ERROR_MEMORY_ALLOC;
3346
3347 hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3348 (u16)strv_packet_size, HPI_OBJ_CONTROL,
3349 HPI_CONTROL_GET_STATE);
3350 u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3351
3352 hm.strv.header.size = sizeof(hm.strv);
3353 phr->strv.header.size = strv_packet_size - sizeof(phr->h);
3354
3355 hpi_send_recv((struct hpi_message *)&hm.h,
3356 (struct hpi_response *)&phr->h);
3357 if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3358
3359 HPI_DEBUG_ASSERT(phr->h.specific_error >
3360 MIN_STRV_PACKET_SIZE
3361 && phr->h.specific_error < 1000);
3362 strv_packet_size = phr->h.specific_error;
3363 } else {
3364 remaining_attempts = 0;
3365 if (!phr->h.error)
3366 hpi_entity_alloc_and_copy(&phr->strv, value);
3367 }
3368
3369 hpi_err = phr->h.error;
3370 kfree(phr);
3371 }
3372
3373 return hpi_err;
3374 }
3375
3376 u16 hpi_universal_set(const struct hpi_hsubsys *ph_subsys, u32 hC,
3377 struct hpi_entity *value)
3378 {
3379 struct hpi_msg_strv *phm;
3380 struct hpi_res_strv hr;
3381
3382 phm = kmalloc(sizeof(phm->h) + value->header.size, GFP_KERNEL);
3383 if (!phm) return HPI_ERROR_MEMORY_ALLOC;
3384
3385 hpi_init_message_responseV1(&phm->h,
3386 sizeof(phm->h) + value->header.size, &hr.h, sizeof(hr),
3387 HPI_OBJ_CONTROL, HPI_CONTROL_SET_STATE);
3388 u32TOINDEXES(hC, &phm->h.adapter_index, &phm->h.obj_index);
3389 hr.strv.header.size = sizeof(hr.strv);
3390
3391 memcpy(&phm->strv, value, value->header.size);
3392 hpi_send_recv((struct hpi_message *)&phm->h,
3393 (struct hpi_response *)&hr.h);
3394
3395 return hr.h.error;
3396 }
3397
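/*
 * Packs item_count values of the given type and role into a freshly
 * allocated entity; the caller presumably owns the result and releases it
 * with hpi_entity_free().
 */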
3398 u16 hpi_entity_alloc_and_pack(const enum e_entity_type type,
3399 const size_t item_count, const enum e_entity_role role, void *value,
3400 struct hpi_entity **entity)
3401 {
3402 size_t bytes_to_copy, total_size;
3403 u16 hE = 0;
3404 *entity = NULL;
3405
3406 hE = hpi_entity_check_type(type);
3407 if (hE)
3408 return hE;
3409
3410 HPI_DEBUG_ASSERT(role > entity_role_null && role < LAST_ENTITY_ROLE);
3411
3412 bytes_to_copy = entity_type_to_size[type] * item_count;
3413 total_size = hpi_entity_header_size(*entity) + bytes_to_copy;
3414
3415 HPI_DEBUG_ASSERT(total_size >= hpi_entity_header_size(*entity)
3416 && total_size < STR_SIZE_FIELD_MAX);
3417
3418 *entity = kmalloc(total_size, GFP_KERNEL);
3419 if (*entity == NULL)
3420 return HPI_ERROR_MEMORY_ALLOC;
3421 memcpy((*entity)->value, value, bytes_to_copy);
3422 (*entity)->header.size =
3423 hpi_entity_header_size(*entity) + bytes_to_copy;
3424 (*entity)->header.type = type;
3425 (*entity)->header.role = role;
3426 return 0;
3427 }
3428
3429 u16 hpi_entity_copy_value_from(struct hpi_entity *entity,
3430 enum e_entity_type type, size_t item_count, void *value_dst_p)
3431 {
3432 size_t bytes_to_copy;
3433
3434 if (entity->header.type != type)
3435 return HPI_ERROR_ENTITY_TYPE_MISMATCH;
3436
3437 if (hpi_entity_item_count(entity) != item_count)
3438 return HPI_ERROR_ENTITY_ITEM_COUNT;
3439
3440 bytes_to_copy = entity_type_to_size[type] * item_count;
3441 memcpy(value_dst_p, entity->value, bytes_to_copy);
3442 return 0;
3443 }
3444
3445 u16 hpi_entity_unpack(struct hpi_entity *entity, enum e_entity_type *type,
3446 size_t *item_count, enum e_entity_role *role, void **value)
3447 {
3448 u16 err = 0;
3449 HPI_DEBUG_ASSERT(entity != NULL);
3450
3451 if (type)
3452 *type = entity->header.type;
3453
3454 if (role)
3455 *role = entity->header.role;
3456
3457 if (value)
3458 *value = entity->value;
3459
3460 if (item_count != NULL) {
3461 if (entity->header.type == entity_type_sequence) {
3462 void *guard_p = hpi_entity_ptr_to_next(entity);
3463 struct hpi_entity *next = NULL;
3464 void *contents = entity->value;
3465
3466 *item_count = 0;
3467 while (contents < guard_p) {
3468 (*item_count)++;
3469 err = hpi_entity_get_next(contents, 0,
3470 guard_p, &next);
3471 if (next == NULL || err)
3472 break;
3473 contents = next;
3474 }
3475 } else {
3476 *item_count = hpi_entity_item_count(entity);
3477 }
3478 }
3479 return err;
3480 }
3481
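/*
 * The open routines below (GPIO, async event, NV memory, profile, watchdog)
 * return an object handle built by hpi_indexes_to_handle(); later calls
 * unpack it again with the u32TOINDEX/u32TOINDEXES helpers.
 */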
3482 u16 hpi_gpio_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3483 u32 *ph_gpio, u16 *pw_number_input_bits, u16 *pw_number_output_bits)
3484 {
3485 struct hpi_message hm;
3486 struct hpi_response hr;
3487 hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_OPEN);
3488 hm.adapter_index = adapter_index;
3489
3490 hpi_send_recv(&hm, &hr);
3491
3492 if (hr.error == 0) {
3493 *ph_gpio =
3494 hpi_indexes_to_handle(HPI_OBJ_GPIO, adapter_index, 0);
3495 if (pw_number_input_bits)
3496 *pw_number_input_bits = hr.u.l.number_input_bits;
3497 if (pw_number_output_bits)
3498 *pw_number_output_bits = hr.u.l.number_output_bits;
3499 } else
3500 *ph_gpio = 0;
3501 return hr.error;
3502 }
3503
3504 u16 hpi_gpio_read_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3505 u16 bit_index, u16 *pw_bit_data)
3506 {
3507 struct hpi_message hm;
3508 struct hpi_response hr;
3509 hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_BIT);
3510 u32TOINDEX(h_gpio, &hm.adapter_index);
3511 hm.u.l.bit_index = bit_index;
3512
3513 hpi_send_recv(&hm, &hr);
3514
3515 *pw_bit_data = hr.u.l.bit_data[0];
3516 return hr.error;
3517 }
3518
3519 u16 hpi_gpio_read_all_bits(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3520 u16 aw_all_bit_data[4]
3521 )
3522 {
3523 struct hpi_message hm;
3524 struct hpi_response hr;
3525 hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_ALL);
3526 u32TOINDEX(h_gpio, &hm.adapter_index);
3527
3528 hpi_send_recv(&hm, &hr);
3529
3530 if (aw_all_bit_data) {
3531 aw_all_bit_data[0] = hr.u.l.bit_data[0];
3532 aw_all_bit_data[1] = hr.u.l.bit_data[1];
3533 aw_all_bit_data[2] = hr.u.l.bit_data[2];
3534 aw_all_bit_data[3] = hr.u.l.bit_data[3];
3535 }
3536 return hr.error;
3537 }
3538
3539 u16 hpi_gpio_write_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3540 u16 bit_index, u16 bit_data)
3541 {
3542 struct hpi_message hm;
3543 struct hpi_response hr;
3544 hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_WRITE_BIT);
3545 u32TOINDEX(h_gpio, &hm.adapter_index);
3546 hm.u.l.bit_index = bit_index;
3547 hm.u.l.bit_data = bit_data;
3548
3549 hpi_send_recv(&hm, &hr);
3550
3551 return hr.error;
3552 }
3553
3554 u16 hpi_gpio_write_status(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3555 u16 aw_all_bit_data[4]
3556 )
3557 {
3558 struct hpi_message hm;
3559 struct hpi_response hr;
3560 hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO,
3561 HPI_GPIO_WRITE_STATUS);
3562 u32TOINDEX(h_gpio, &hm.adapter_index);
3563
3564 hpi_send_recv(&hm, &hr);
3565
3566 if (aw_all_bit_data) {
3567 aw_all_bit_data[0] = hr.u.l.bit_data[0];
3568 aw_all_bit_data[1] = hr.u.l.bit_data[1];
3569 aw_all_bit_data[2] = hr.u.l.bit_data[2];
3570 aw_all_bit_data[3] = hr.u.l.bit_data[3];
3571 }
3572 return hr.error;
3573 }
3574
3575 u16 hpi_async_event_open(const struct hpi_hsubsys *ph_subsys,
3576 u16 adapter_index, u32 *ph_async)
3577 {
3578 struct hpi_message hm;
3579 struct hpi_response hr;
3580 hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3581 HPI_ASYNCEVENT_OPEN);
3582 hm.adapter_index = adapter_index;
3583
3584 hpi_send_recv(&hm, &hr);
3585
3586 if (hr.error == 0)
3587
3588 *ph_async =
3589 hpi_indexes_to_handle(HPI_OBJ_ASYNCEVENT,
3590 adapter_index, 0);
3591 else
3592 *ph_async = 0;
3593 return hr.error;
3594
3595 }
3596
3597 u16 hpi_async_event_close(const struct hpi_hsubsys *ph_subsys, u32 h_async)
3598 {
3599 struct hpi_message hm;
3600 struct hpi_response hr;
3601 hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3602 HPI_ASYNCEVENT_OPEN);
3603 u32TOINDEX(h_async, &hm.adapter_index);
3604
3605 hpi_send_recv(&hm, &hr);
3606
3607 return hr.error;
3608 }
3609
3610 u16 hpi_async_event_wait(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3611 u16 maximum_events, struct hpi_async_event *p_events,
3612 u16 *pw_number_returned)
3613 {
3614 return 0;
3615 }
3616
3617 u16 hpi_async_event_get_count(const struct hpi_hsubsys *ph_subsys,
3618 u32 h_async, u16 *pw_count)
3619 {
3620 struct hpi_message hm;
3621 struct hpi_response hr;
3622 hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3623 HPI_ASYNCEVENT_GETCOUNT);
3624 u32TOINDEX(h_async, &hm.adapter_index);
3625
3626 hpi_send_recv(&hm, &hr);
3627
3628 if (hr.error == 0)
3629 if (pw_count)
3630 *pw_count = hr.u.as.u.count.count;
3631
3632 return hr.error;
3633 }
3634
3635 u16 hpi_async_event_get(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3636 u16 maximum_events, struct hpi_async_event *p_events,
3637 u16 *pw_number_returned)
3638 {
3639 struct hpi_message hm;
3640 struct hpi_response hr;
3641 hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3642 HPI_ASYNCEVENT_GET);
3643 u32TOINDEX(h_async, &hm.adapter_index);
3644
3645 hpi_send_recv(&hm, &hr);
3646 if (!hr.error) {
3647 memcpy(p_events, &hr.u.as.u.event,
3648 sizeof(struct hpi_async_event));
3649 *pw_number_returned = 1;
3650 }
3651
3652 return hr.error;
3653 }
3654
3655 u16 hpi_nv_memory_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3656 u32 *ph_nv_memory, u16 *pw_size_in_bytes)
3657 {
3658 struct hpi_message hm;
3659 struct hpi_response hr;
3660 hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3661 HPI_NVMEMORY_OPEN);
3662 hm.adapter_index = adapter_index;
3663
3664 hpi_send_recv(&hm, &hr);
3665
3666 if (hr.error == 0) {
3667 *ph_nv_memory =
3668 hpi_indexes_to_handle(HPI_OBJ_NVMEMORY, adapter_index,
3669 0);
3670 if (pw_size_in_bytes)
3671 *pw_size_in_bytes = hr.u.n.size_in_bytes;
3672 } else
3673 *ph_nv_memory = 0;
3674 return hr.error;
3675 }
3676
3677 u16 hpi_nv_memory_read_byte(const struct hpi_hsubsys *ph_subsys,
3678 u32 h_nv_memory, u16 index, u16 *pw_data)
3679 {
3680 struct hpi_message hm;
3681 struct hpi_response hr;
3682 hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3683 HPI_NVMEMORY_READ_BYTE);
3684 u32TOINDEX(h_nv_memory, &hm.adapter_index);
3685 hm.u.n.address = index;
3686
3687 hpi_send_recv(&hm, &hr);
3688
3689 *pw_data = hr.u.n.data;
3690 return hr.error;
3691 }
3692
3693 u16 hpi_nv_memory_write_byte(const struct hpi_hsubsys *ph_subsys,
3694 u32 h_nv_memory, u16 index, u16 data)
3695 {
3696 struct hpi_message hm;
3697 struct hpi_response hr;
3698 hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3699 HPI_NVMEMORY_WRITE_BYTE);
3700 u32TOINDEX(h_nv_memory, &hm.adapter_index);
3701 hm.u.n.address = index;
3702 hm.u.n.data = data;
3703
3704 hpi_send_recv(&hm, &hr);
3705
3706 return hr.error;
3707 }
3708
3709 u16 hpi_profile_open_all(const struct hpi_hsubsys *ph_subsys,
3710 u16 adapter_index, u16 profile_index, u32 *ph_profile,
3711 u16 *pw_max_profiles)
3712 {
3713 struct hpi_message hm;
3714 struct hpi_response hr;
3715 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3716 HPI_PROFILE_OPEN_ALL);
3717 hm.adapter_index = adapter_index;
3718 hm.obj_index = profile_index;
3719 hpi_send_recv(&hm, &hr);
3720
3721 *pw_max_profiles = hr.u.p.u.o.max_profiles;
3722 if (hr.error == 0)
3723 *ph_profile =
3724 hpi_indexes_to_handle(HPI_OBJ_PROFILE, adapter_index,
3725 profile_index);
3726 else
3727 *ph_profile = 0;
3728 return hr.error;
3729 }
3730
3731 u16 hpi_profile_get(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
3732 u16 bin_index, u16 *pw_seconds, u32 *pmicro_seconds, u32 *pcall_count,
3733 u32 *pmax_micro_seconds, u32 *pmin_micro_seconds)
3734 {
3735 struct hpi_message hm;
3736 struct hpi_response hr;
3737 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE, HPI_PROFILE_GET);
3738 u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3739 hm.u.p.bin_index = bin_index;
3740 hpi_send_recv(&hm, &hr);
3741 if (pw_seconds)
3742 *pw_seconds = hr.u.p.u.t.seconds;
3743 if (pmicro_seconds)
3744 *pmicro_seconds = hr.u.p.u.t.micro_seconds;
3745 if (pcall_count)
3746 *pcall_count = hr.u.p.u.t.call_count;
3747 if (pmax_micro_seconds)
3748 *pmax_micro_seconds = hr.u.p.u.t.max_micro_seconds;
3749 if (pmin_micro_seconds)
3750 *pmin_micro_seconds = hr.u.p.u.t.min_micro_seconds;
3751 return hr.error;
3752 }
3753
3754 u16 hpi_profile_get_utilization(const struct hpi_hsubsys *ph_subsys,
3755 u32 h_profile, u32 *putilization)
3756 {
3757 struct hpi_message hm;
3758 struct hpi_response hr;
3759 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3760 HPI_PROFILE_GET_UTILIZATION);
3761 u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3762 hpi_send_recv(&hm, &hr);
3763 if (hr.error) {
3764 if (putilization)
3765 *putilization = 0;
3766 } else {
3767 if (putilization)
3768 *putilization = hr.u.p.u.t.call_count;
3769 }
3770 return hr.error;
3771 }
3772
3773 u16 hpi_profile_get_name(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
3774 u16 bin_index, char *sz_name, u16 name_length)
3775 {
3776 struct hpi_message hm;
3777 struct hpi_response hr;
3778 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3779 HPI_PROFILE_GET_NAME);
3780 u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3781 hm.u.p.bin_index = bin_index;
3782 hpi_send_recv(&hm, &hr);
3783 if (hr.error) {
3784 if (sz_name)
3785 strcpy(sz_name, "??");
3786 } else {
3787 if (sz_name)
3788 memcpy(sz_name, (char *)hr.u.p.u.n.sz_name,
3789 name_length);
3790 }
3791 return hr.error;
3792 }
3793
3794 u16 hpi_profile_start_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
3795 {
3796 struct hpi_message hm;
3797 struct hpi_response hr;
3798 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3799 HPI_PROFILE_START_ALL);
3800 u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3801 hpi_send_recv(&hm, &hr);
3802
3803 return hr.error;
3804 }
3805
3806 u16 hpi_profile_stop_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
3807 {
3808 struct hpi_message hm;
3809 struct hpi_response hr;
3810 hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3811 HPI_PROFILE_STOP_ALL);
3812 u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3813 hpi_send_recv(&hm, &hr);
3814
3815 return hr.error;
3816 }
3817
3818 u16 hpi_watchdog_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3819 u32 *ph_watchdog)
3820 {
3821 struct hpi_message hm;
3822 struct hpi_response hr;
3823 hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3824 HPI_WATCHDOG_OPEN);
3825 hm.adapter_index = adapter_index;
3826
3827 hpi_send_recv(&hm, &hr);
3828
3829 if (hr.error == 0)
3830 *ph_watchdog =
3831 hpi_indexes_to_handle(HPI_OBJ_WATCHDOG, adapter_index,
3832 0);
3833 else
3834 *ph_watchdog = 0;
3835 return hr.error;
3836 }
3837
3838 u16 hpi_watchdog_set_time(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog,
3839 u32 time_millisec)
3840 {
3841 struct hpi_message hm;
3842 struct hpi_response hr;
3843 hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3844 HPI_WATCHDOG_SET_TIME);
3845 u32TOINDEX(h_watchdog, &hm.adapter_index);
3846 hm.u.w.time_ms = time_millisec;
3847
3848 hpi_send_recv(&hm, &hr);
3849
3850 return hr.error;
3851 }
3852
3853 u16 hpi_watchdog_ping(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog)
3854 {
3855 struct hpi_message hm;
3856 struct hpi_response hr;
3857 hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3858 HPI_WATCHDOG_PING);
3859 u32TOINDEX(h_watchdog, &hm.adapter_index);
3860
3861 hpi_send_recv(&hm, &hr);
3862
3863 return hr.error;
3864 }