This file is indexed.

/usr/include/android-22/hardware/camera3.h is in android-headers-22 23-0ubuntu4.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

   1
   2
   3
   4
   5
   6
   7
   8
   9
  10
  11
  12
  13
  14
  15
  16
  17
  18
  19
  20
  21
  22
  23
  24
  25
  26
  27
  28
  29
  30
  31
  32
  33
  34
  35
  36
  37
  38
  39
  40
  41
  42
  43
  44
  45
  46
  47
  48
  49
  50
  51
  52
  53
  54
  55
  56
  57
  58
  59
  60
  61
  62
  63
  64
  65
  66
  67
  68
  69
  70
  71
  72
  73
  74
  75
  76
  77
  78
  79
  80
  81
  82
  83
  84
  85
  86
  87
  88
  89
  90
  91
  92
  93
  94
  95
  96
  97
  98
  99
 100
 101
 102
 103
 104
 105
 106
 107
 108
 109
 110
 111
 112
 113
 114
 115
 116
 117
 118
 119
 120
 121
 122
 123
 124
 125
 126
 127
 128
 129
 130
 131
 132
 133
 134
 135
 136
 137
 138
 139
 140
 141
 142
 143
 144
 145
 146
 147
 148
 149
 150
 151
 152
 153
 154
 155
 156
 157
 158
 159
 160
 161
 162
 163
 164
 165
 166
 167
 168
 169
 170
 171
 172
 173
 174
 175
 176
 177
 178
 179
 180
 181
 182
 183
 184
 185
 186
 187
 188
 189
 190
 191
 192
 193
 194
 195
 196
 197
 198
 199
 200
 201
 202
 203
 204
 205
 206
 207
 208
 209
 210
 211
 212
 213
 214
 215
 216
 217
 218
 219
 220
 221
 222
 223
 224
 225
 226
 227
 228
 229
 230
 231
 232
 233
 234
 235
 236
 237
 238
 239
 240
 241
 242
 243
 244
 245
 246
 247
 248
 249
 250
 251
 252
 253
 254
 255
 256
 257
 258
 259
 260
 261
 262
 263
 264
 265
 266
 267
 268
 269
 270
 271
 272
 273
 274
 275
 276
 277
 278
 279
 280
 281
 282
 283
 284
 285
 286
 287
 288
 289
 290
 291
 292
 293
 294
 295
 296
 297
 298
 299
 300
 301
 302
 303
 304
 305
 306
 307
 308
 309
 310
 311
 312
 313
 314
 315
 316
 317
 318
 319
 320
 321
 322
 323
 324
 325
 326
 327
 328
 329
 330
 331
 332
 333
 334
 335
 336
 337
 338
 339
 340
 341
 342
 343
 344
 345
 346
 347
 348
 349
 350
 351
 352
 353
 354
 355
 356
 357
 358
 359
 360
 361
 362
 363
 364
 365
 366
 367
 368
 369
 370
 371
 372
 373
 374
 375
 376
 377
 378
 379
 380
 381
 382
 383
 384
 385
 386
 387
 388
 389
 390
 391
 392
 393
 394
 395
 396
 397
 398
 399
 400
 401
 402
 403
 404
 405
 406
 407
 408
 409
 410
 411
 412
 413
 414
 415
 416
 417
 418
 419
 420
 421
 422
 423
 424
 425
 426
 427
 428
 429
 430
 431
 432
 433
 434
 435
 436
 437
 438
 439
 440
 441
 442
 443
 444
 445
 446
 447
 448
 449
 450
 451
 452
 453
 454
 455
 456
 457
 458
 459
 460
 461
 462
 463
 464
 465
 466
 467
 468
 469
 470
 471
 472
 473
 474
 475
 476
 477
 478
 479
 480
 481
 482
 483
 484
 485
 486
 487
 488
 489
 490
 491
 492
 493
 494
 495
 496
 497
 498
 499
 500
 501
 502
 503
 504
 505
 506
 507
 508
 509
 510
 511
 512
 513
 514
 515
 516
 517
 518
 519
 520
 521
 522
 523
 524
 525
 526
 527
 528
 529
 530
 531
 532
 533
 534
 535
 536
 537
 538
 539
 540
 541
 542
 543
 544
 545
 546
 547
 548
 549
 550
 551
 552
 553
 554
 555
 556
 557
 558
 559
 560
 561
 562
 563
 564
 565
 566
 567
 568
 569
 570
 571
 572
 573
 574
 575
 576
 577
 578
 579
 580
 581
 582
 583
 584
 585
 586
 587
 588
 589
 590
 591
 592
 593
 594
 595
 596
 597
 598
 599
 600
 601
 602
 603
 604
 605
 606
 607
 608
 609
 610
 611
 612
 613
 614
 615
 616
 617
 618
 619
 620
 621
 622
 623
 624
 625
 626
 627
 628
 629
 630
 631
 632
 633
 634
 635
 636
 637
 638
 639
 640
 641
 642
 643
 644
 645
 646
 647
 648
 649
 650
 651
 652
 653
 654
 655
 656
 657
 658
 659
 660
 661
 662
 663
 664
 665
 666
 667
 668
 669
 670
 671
 672
 673
 674
 675
 676
 677
 678
 679
 680
 681
 682
 683
 684
 685
 686
 687
 688
 689
 690
 691
 692
 693
 694
 695
 696
 697
 698
 699
 700
 701
 702
 703
 704
 705
 706
 707
 708
 709
 710
 711
 712
 713
 714
 715
 716
 717
 718
 719
 720
 721
 722
 723
 724
 725
 726
 727
 728
 729
 730
 731
 732
 733
 734
 735
 736
 737
 738
 739
 740
 741
 742
 743
 744
 745
 746
 747
 748
 749
 750
 751
 752
 753
 754
 755
 756
 757
 758
 759
 760
 761
 762
 763
 764
 765
 766
 767
 768
 769
 770
 771
 772
 773
 774
 775
 776
 777
 778
 779
 780
 781
 782
 783
 784
 785
 786
 787
 788
 789
 790
 791
 792
 793
 794
 795
 796
 797
 798
 799
 800
 801
 802
 803
 804
 805
 806
 807
 808
 809
 810
 811
 812
 813
 814
 815
 816
 817
 818
 819
 820
 821
 822
 823
 824
 825
 826
 827
 828
 829
 830
 831
 832
 833
 834
 835
 836
 837
 838
 839
 840
 841
 842
 843
 844
 845
 846
 847
 848
 849
 850
 851
 852
 853
 854
 855
 856
 857
 858
 859
 860
 861
 862
 863
 864
 865
 866
 867
 868
 869
 870
 871
 872
 873
 874
 875
 876
 877
 878
 879
 880
 881
 882
 883
 884
 885
 886
 887
 888
 889
 890
 891
 892
 893
 894
 895
 896
 897
 898
 899
 900
 901
 902
 903
 904
 905
 906
 907
 908
 909
 910
 911
 912
 913
 914
 915
 916
 917
 918
 919
 920
 921
 922
 923
 924
 925
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723
1724
1725
1726
1727
1728
1729
1730
1731
1732
1733
1734
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
1875
1876
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901
1902
1903
1904
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914
1915
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
2301
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
2337
2338
2339
2340
2341
2342
2343
2344
2345
2346
2347
2348
2349
2350
2351
2352
2353
2354
2355
2356
2357
2358
2359
2360
2361
2362
2363
2364
2365
2366
2367
2368
2369
2370
2371
2372
2373
2374
2375
2376
2377
2378
2379
2380
2381
2382
2383
2384
2385
2386
2387
2388
2389
2390
2391
2392
2393
2394
2395
2396
2397
2398
2399
2400
2401
2402
2403
2404
2405
2406
2407
2408
2409
2410
2411
2412
2413
2414
2415
2416
2417
2418
2419
2420
2421
2422
2423
2424
2425
2426
2427
2428
2429
2430
2431
2432
2433
2434
2435
2436
2437
2438
2439
2440
2441
2442
2443
2444
2445
2446
2447
2448
2449
2450
2451
2452
2453
2454
2455
2456
2457
2458
2459
2460
2461
2462
2463
2464
2465
2466
2467
2468
2469
2470
2471
2472
2473
2474
2475
2476
2477
2478
2479
2480
2481
2482
2483
2484
2485
2486
2487
2488
2489
2490
2491
2492
2493
2494
2495
2496
2497
2498
2499
2500
2501
2502
2503
2504
2505
2506
2507
2508
2509
2510
2511
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521
2522
2523
2524
2525
2526
2527
2528
2529
2530
2531
2532
2533
2534
2535
2536
2537
2538
2539
2540
2541
2542
2543
2544
2545
2546
2547
2548
2549
2550
2551
2552
2553
2554
2555
2556
2557
2558
2559
2560
2561
2562
2563
2564
2565
2566
2567
2568
2569
2570
2571
2572
2573
2574
2575
2576
2577
2578
2579
2580
2581
2582
2583
2584
2585
2586
2587
2588
2589
2590
2591
2592
2593
2594
2595
2596
2597
2598
2599
2600
2601
2602
2603
2604
2605
2606
2607
2608
2609
2610
2611
2612
2613
2614
2615
2616
2617
2618
2619
2620
2621
2622
2623
2624
2625
2626
2627
2628
2629
2630
2631
2632
2633
2634
2635
2636
2637
2638
2639
2640
2641
2642
2643
2644
2645
2646
2647
2648
2649
2650
2651
2652
2653
2654
2655
2656
2657
2658
2659
2660
2661
2662
2663
2664
2665
2666
2667
2668
2669
2670
2671
2672
2673
2674
2675
2676
2677
2678
2679
2680
2681
2682
2683
2684
2685
2686
2687
2688
2689
2690
2691
2692
2693
2694
2695
2696
2697
2698
2699
2700
2701
2702
2703
2704
2705
2706
2707
2708
2709
2710
2711
2712
2713
2714
2715
2716
2717
2718
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_INCLUDE_CAMERA3_H
#define ANDROID_INCLUDE_CAMERA3_H

#include <system/camera_metadata.h>
#include "camera_common.h"

/**
 * Camera device HAL 3.2 [ CAMERA_DEVICE_API_VERSION_3_2 ]
 *
 * This is the current recommended version of the camera device HAL.
 *
 * Supports the android.hardware.Camera API, and as of v3.2, the
 * android.hardware.camera2 API in LIMITED or FULL modes.
 *
 * Camera devices that support this version of the HAL must return
 * CAMERA_DEVICE_API_VERSION_3_2 in camera_device_t.common.version and in
 * camera_info_t.device_version (from camera_module_t.get_camera_info).
 *
 * CAMERA_DEVICE_API_VERSION_3_2:
 *    Camera modules that may contain version 3.2 devices must implement at
 *    least version 2.2 of the camera module interface (as defined by
 *    camera_module_t.common.module_api_version).
 *
 * <= CAMERA_DEVICE_API_VERSION_3_1:
 *    Camera modules that may contain version 3.1 (or 3.0) devices must
 *    implement at least version 2.0 of the camera module interface
 *    (as defined by camera_module_t.common.module_api_version).
 *
 * See camera_common.h for more versioning details.
 *
 * Documentation index:
 *   S1. Version history
 *   S2. Startup and operation sequencing
 *   S3. Operational modes
 *   S4. 3A modes and state machines
 *   S5. Cropping
 *   S6. Error management
 *   S7. Key Performance Indicator (KPI) glossary
 *   S8. Sample Use Cases
 *   S9. Notes on Controls and Metadata
 */

/**
 * S1. Version history:
 *
 * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]:
 *
 *   - Converted from C++ CameraHardwareInterface abstraction layer.
 *
 *   - Supports android.hardware.Camera API.
 *
 * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:
 *
 *   - Sufficient for implementing existing android.hardware.Camera API.
 *
 *   - Allows for ZSL queue in camera service layer
 *
 *   - Not tested for any new features such manual capture control, Bayer RAW
 *     capture, reprocessing of RAW data.
 *
 * 3.0: First revision of expanded-capability HAL:
 *
 *   - Major version change since the ABI is completely different. No change to
 *     the required hardware capabilities or operational model from 2.0.
 *
 *   - Reworked input request and stream queue interfaces: Framework calls into
 *     HAL with next request and stream buffers already dequeued. Sync framework
 *     support is included, necessary for efficient implementations.
 *
 *   - Moved triggers into requests, most notifications into results.
 *
 *   - Consolidated all callbacks into framework into one structure, and all
 *     setup methods into a single initialize() call.
 *
 *   - Made stream configuration into a single call to simplify stream
 *     management. Bidirectional streams replace STREAM_FROM_STREAM construct.
 *
 *   - Limited mode semantics for older/limited hardware devices.
 *
 * 3.1: Minor revision of expanded-capability HAL:
 *
 *   - configure_streams passes consumer usage flags to the HAL.
 *
 *   - flush call to drop all in-flight requests/buffers as fast as possible.
 *
 * 3.2: Minor revision of expanded-capability HAL:
 *
 *   - Deprecates get_metadata_vendor_tag_ops.  Please use get_vendor_tag_ops
 *     in camera_common.h instead.
 *
 *   - register_stream_buffers deprecated. All gralloc buffers provided
 *     by framework to HAL in process_capture_request may be new at any time.
 *
 *   - add partial result support. process_capture_result may be called
 *     multiple times with a subset of the available result before the full
 *     result is available.
 *
 *   - add manual template to camera3_request_template. The applications may
 *     use this template to control the capture settings directly.
 *
 *   - Rework the bidirectional and input stream specifications.
 *
 *   - change the input buffer return path. The buffer is returned in
 *     process_capture_result instead of process_capture_request.
 *
 */

/**
 * S2. Startup and general expected operation sequence:
 *
 * 1. Framework calls camera_module_t->common.open(), which returns a
 *    hardware_device_t structure.
 *
 * 2. Framework inspects the hardware_device_t->version field, and instantiates
 *    the appropriate handler for that version of the camera hardware device. In
 *    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to
 *    a camera3_device_t.
 *
 * 3. Framework calls camera3_device_t->ops->initialize() with the framework
 *    callback function pointers. This will only be called this one time after
 *    open(), before any other functions in the ops structure are called.
 *
 * 4. The framework calls camera3_device_t->ops->configure_streams() with a list
 *    of input/output streams to the HAL device.
 *
 * 5. <= CAMERA_DEVICE_API_VERSION_3_1:
 *
 *    The framework allocates gralloc buffers and calls
 *    camera3_device_t->ops->register_stream_buffers() for at least one of the
 *    output streams listed in configure_streams. The same stream is registered
 *    only once.
 *
 *    >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 *    camera3_device_t->ops->register_stream_buffers() is not called and must
 *    be NULL.
 *
 * 6. The framework requests default settings for some number of use cases with
 *    calls to camera3_device_t->ops->construct_default_request_settings(). This
 *    may occur any time after step 3.
 *
 * 7. The framework constructs and sends the first capture request to the HAL,
 *    with settings based on one of the sets of default settings, and with at
 *    least one output stream, which has been registered earlier by the
 *    framework. This is sent to the HAL with
 *    camera3_device_t->ops->process_capture_request(). The HAL must block the
 *    return of this call until it is ready for the next request to be sent.
 *
 *    >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 *    The buffer_handle_t provided in the camera3_stream_buffer_t array
 *    in the camera3_capture_request_t may be new and never-before-seen
 *    by the HAL on any given new request.
 *
 * 8. The framework continues to submit requests, and call
 *    construct_default_request_settings to get default settings buffers for
 *    other use cases.
 *
 *    <= CAMERA_DEVICE_API_VERSION_3_1:
 *
 *    The framework may call register_stream_buffers() at this time for
 *    not-yet-registered streams.
 *
 * 9. When the capture of a request begins (sensor starts exposing for the
 *    capture), the HAL calls camera3_callback_ops_t->notify() with the SHUTTER
 *    event, including the frame number and the timestamp for start of exposure.
 *
 *    <= CAMERA_DEVICE_API_VERSION_3_1:
 *
 *    This notify call must be made before the first call to
 *    process_capture_result() for that frame number.
 *
 *    >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 *    The camera3_callback_ops_t->notify() call with the SHUTTER event should
 *    be made as early as possible since the framework will be unable to
 *    deliver gralloc buffers to the application layer (for that frame) until
 *    it has a valid timestamp for the start of exposure.
 *
 *    Both partial metadata results and the gralloc buffers may be sent to the
 *    framework at any time before or after the SHUTTER event.
 *
 * 10. After some pipeline delay, the HAL begins to return completed captures to
 *    the framework with camera3_callback_ops_t->process_capture_result(). These
 *    are returned in the same order as the requests were submitted. Multiple
 *    requests can be in flight at once, depending on the pipeline depth of the
 *    camera HAL device.
 *
 *    >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 *    Once a buffer is returned by process_capture_result as part of the
 *    camera3_stream_buffer_t array, and the fence specified by release_fence
 *    has been signaled (this is a no-op for -1 fences), the ownership of that
 *    buffer is considered to be transferred back to the framework. After that,
 *    the HAL must no longer retain that particular buffer, and the
 *    framework may clean up the memory for it immediately.
 *
 *    process_capture_result may be called multiple times for a single frame,
 *    each time with a new disjoint piece of metadata and/or set of gralloc
 *    buffers. The framework will accumulate these partial metadata results
 *    into one result.
 *
 *    In particular, it is legal for a process_capture_result to be called
 *    simultaneously for both a frame N and a frame N+1 as long as the
 *    above rule holds for gralloc buffers (both input and output).
 *
 * 11. After some time, the framework may stop submitting new requests, wait for
 *    the existing captures to complete (all buffers filled, all results
 *    returned), and then call configure_streams() again. This resets the camera
 *    hardware and pipeline for a new set of input/output streams. Some streams
 *    may be reused from the previous configuration; if these streams' buffers
 *    had already been registered with the HAL, they will not be registered
 *    again. The framework then continues from step 7, if at least one
 *    registered output stream remains (otherwise, step 5 is required first).
 *
 * 12. Alternatively, the framework may call camera3_device_t->common->close()
 *    to end the camera session. This may be called at any time when no other
 *    calls from the framework are active, although the call may block until all
 *    in-flight captures have completed (all results returned, all buffers
 *    filled). After the close call returns, no more calls to the
 *    camera3_callback_ops_t functions are allowed from the HAL. Once the
 *    close() call is underway, the framework may not call any other HAL device
 *    functions.
 *
 * 13. In case of an error or other asynchronous event, the HAL must call
 *    camera3_callback_ops_t->notify() with the appropriate error/event
 *    message. After returning from a fatal device-wide error notification, the
 *    HAL should act as if close() had been called on it. However, the HAL must
 *    either cancel or complete all outstanding captures before calling
 *    notify(), so that once notify() is called with a fatal error, the
 *    framework will not receive further callbacks from the device. Methods
 *    besides close() should return -ENODEV or NULL after the notify() method
 *    returns from a fatal error message.
 */

/**
 * S3. Operational modes:
 *
 * The camera 3 HAL device can implement one of two possible operational modes;
 * limited and full. Full support is expected from new higher-end
 * devices. Limited mode has hardware requirements roughly in line with those
 * for a camera HAL device v1 implementation, and is expected from older or
 * inexpensive devices. Full is a strict superset of limited, and they share the
 * same essential operational flow, as documented above.
 *
 * The HAL must indicate its level of support with the
 * android.info.supportedHardwareLevel static metadata entry, with 0 indicating
 * limited mode, and 1 indicating full mode support.
 *
 * Roughly speaking, limited-mode devices do not allow for application control
 * of capture settings (3A control only), high-rate capture of high-resolution
 * images, raw sensor readout, or support for YUV output streams above maximum
 * recording resolution (JPEG only for large images).
 *
 * ** Details of limited mode behavior:
 *
 * - Limited-mode devices do not need to implement accurate synchronization
 *   between capture request settings and the actual image data
 *   captured. Instead, changes to settings may take effect some time in the
 *   future, and possibly not for the same output frame for each settings
 *   entry. Rapid changes in settings may result in some settings never being
 *   used for a capture. However, captures that include high-resolution output
 *   buffers ( > 1080p ) have to use the settings as specified (but see below
 *   for processing rate).
 *
 * - Limited-mode devices do not need to support most of the
 *   settings/result/static info metadata. Specifically, only the following settings
 *   are expected to be consumed or produced by a limited-mode HAL device:
 *
 *   android.control.aeAntibandingMode (controls and dynamic)
 *   android.control.aeExposureCompensation (controls and dynamic)
 *   android.control.aeLock (controls and dynamic)
 *   android.control.aeMode (controls and dynamic)
 *   android.control.aeRegions (controls and dynamic)
 *   android.control.aeTargetFpsRange (controls and dynamic)
 *   android.control.aePrecaptureTrigger (controls and dynamic)
 *   android.control.afMode (controls and dynamic)
 *   android.control.afRegions (controls and dynamic)
 *   android.control.awbLock (controls and dynamic)
 *   android.control.awbMode (controls and dynamic)
 *   android.control.awbRegions (controls and dynamic)
 *   android.control.captureIntent (controls and dynamic)
 *   android.control.effectMode (controls and dynamic)
 *   android.control.mode (controls and dynamic)
 *   android.control.sceneMode (controls and dynamic)
 *   android.control.videoStabilizationMode (controls and dynamic)
 *   android.control.aeAvailableAntibandingModes (static)
 *   android.control.aeAvailableModes (static)
 *   android.control.aeAvailableTargetFpsRanges (static)
 *   android.control.aeCompensationRange (static)
 *   android.control.aeCompensationStep (static)
 *   android.control.afAvailableModes (static)
 *   android.control.availableEffects (static)
 *   android.control.availableSceneModes (static)
 *   android.control.availableVideoStabilizationModes (static)
 *   android.control.awbAvailableModes (static)
 *   android.control.maxRegions (static)
 *   android.control.sceneModeOverrides (static)
 *   android.control.aeState (dynamic)
 *   android.control.afState (dynamic)
 *   android.control.awbState (dynamic)
 *
 *   android.flash.mode (controls and dynamic)
 *   android.flash.info.available (static)
 *
 *   android.info.supportedHardwareLevel (static)
 *
 *   android.jpeg.gpsCoordinates (controls and dynamic)
 *   android.jpeg.gpsProcessingMethod (controls and dynamic)
 *   android.jpeg.gpsTimestamp (controls and dynamic)
 *   android.jpeg.orientation (controls and dynamic)
 *   android.jpeg.quality (controls and dynamic)
 *   android.jpeg.thumbnailQuality (controls and dynamic)
 *   android.jpeg.thumbnailSize (controls and dynamic)
 *   android.jpeg.availableThumbnailSizes (static)
 *   android.jpeg.maxSize (static)
 *
 *   android.lens.info.minimumFocusDistance (static)
 *
 *   android.request.id (controls and dynamic)
 *
 *   android.scaler.cropRegion (controls and dynamic)
 *   android.scaler.availableStreamConfigurations (static)
 *   android.scaler.availableMinFrameDurations (static)
 *   android.scaler.availableStallDurations (static)
 *   android.scaler.availableMaxDigitalZoom (static)
 *   android.scaler.maxDigitalZoom (static)
 *   android.scaler.croppingType (static)
 *
 *   android.sensor.orientation (static)
 *   android.sensor.timestamp (dynamic)
 *
 *   android.statistics.faceDetectMode (controls and dynamic)
 *   android.statistics.info.availableFaceDetectModes (static)
 *   android.statistics.faceIds (dynamic)
 *   android.statistics.faceLandmarks (dynamic)
 *   android.statistics.faceRectangles (dynamic)
 *   android.statistics.faceScores (dynamic)
 *
 *   android.sync.frameNumber (dynamic)
 *   android.sync.maxLatency (static)
 *
 * - Captures in limited mode that include high-resolution (> 1080p) output
 *   buffers may block in process_capture_request() until all the output buffers
 *   have been filled. A full-mode HAL device must process sequences of
 *   high-resolution requests at the rate indicated in the static metadata for
 *   that pixel format. The HAL must still call process_capture_result() to
 *   provide the output; the framework must simply be prepared for
 *   process_capture_request() to block until after process_capture_result() for
 *   that request completes for high-resolution captures for limited-mode
 *   devices.
 *
 * - Full-mode devices must support below additional capabilities:
 *   - 30fps at maximum resolution is preferred, more than 20fps is required.
 *   - Per frame control (android.sync.maxLatency == PER_FRAME_CONTROL).
 *   - Sensor manual control metadata. See MANUAL_SENSOR defined in
 *     android.request.availableCapabilities.
 *   - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined
 *     in android.request.availableCapabilities.
 *
 */

/**
 * S4. 3A modes and state machines:
 *
 * While the actual 3A algorithms are up to the HAL implementation, a high-level
 * state machine description is defined by the HAL interface, to allow the HAL
 * device and the framework to communicate about the current state of 3A, and to
 * trigger 3A events.
 *
 * When the device is opened, all the individual 3A states must be
 * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked
 * focus must be maintained across the configure() call.
 *
 * Triggering a 3A action involves simply setting the relevant trigger entry in
 * the settings for the next request to indicate start of trigger. For example,
 * the trigger for starting an autofocus scan is setting the entry
 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one
 * request, and cancelling an autofocus scan is triggered by setting
 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTRL_AF_TRIGGER_CANCEL. Otherwise,
 * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each
 * request with a trigger entry set to a non-IDLE value will be treated as an
 * independent triggering event.
 *
 * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which
 * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode
 * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting
 * (ANDROID_CONTROL_USE_SCENE_MODE).
 *
 * - In OFF mode, each of the individual AE/AF/AWB modes are effectively OFF,
 *   and none of the capture controls may be overridden by the 3A routines.
 *
 * - In AUTO mode, Auto-focus, auto-exposure, and auto-whitebalance all run
 *   their own independent algorithms, and have their own mode, state, and
 *   trigger metadata entries, as listed in the next section.
 *
 * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must
 *   be used to determine the behavior of 3A routines. In SCENE_MODEs other than
 *   FACE_PRIORITY, the HAL must override the values of
 *   ANDROId_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected
 *   SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use
 *   CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE when scene
 *   must be ignored for these scene modes.
 *
 * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in
 *   ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering
 *   and focusing on any detected faces in the scene.
 *
 * S4.1. Auto-focus settings and result entries:
 *
 *  Main metadata entries:
 *
 *   ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus
 *      mode. Set by the framework in the request settings.
 *
 *     AF_MODE_OFF: AF is disabled; the framework/app directly controls lens
 *         position.
 *
 *     AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is
 *         triggered.
 *
 *     AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless
 *         AF is triggered.
 *
 *     AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording
 *         video. Triggering immediately locks focus in current
 *         position. Canceling resumes cotinuous focusing.
 *
 *     AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for
 *        zero-shutter-lag still capture. Triggering locks focus once currently
 *        active sweep concludes. Canceling resumes continuous focusing.
 *
 *     AF_MODE_EDOF: Advanced extended depth of field focusing. There is no
 *        autofocus scan, so triggering one or canceling one has no effect.
 *        Images are focused automatically by the HAL.
 *
 *   ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF
 *       algorithm state, reported by the HAL in the result metadata.
 *
 *     AF_STATE_INACTIVE: No focusing has been done, or algorithm was
 *        reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.
 *        When the device is opened, it must start in this state.
 *
 *     AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning
 *        for good focus. The lens is moving.
 *
 *     AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is
 *        well focused. The lens is not moving. The HAL may spontaneously leave
 *        this state.
 *
 *     AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is
 *        not well focused. The lens is not moving. The HAL may spontaneously
 *        leave this state.
 *
 *     AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.
 *
 *     AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The
 *        lens is not moving.
 *
 *     AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to
 *        focus. The lens is not moving.
 *
 *   ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the
 *       meaning of which is mode- and state- dependent. Set by the framework in
 *       the request settings.
 *
 *     AF_TRIGGER_IDLE: No current trigger.
 *
 *     AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state
 *         dependent.
 *
 *     AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to
 *         default.
 *
 *  Additional metadata entries:
 *
 *   ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV
 *       that should be used to determine good focus. This applies to all AF
 *       modes that scan for focus. Set by the framework in the request
 *       settings.
 *
 * S4.2. Auto-exposure settings and result entries:
 *
 *  Main metadata entries:
 *
 *   ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure
 *       mode. Set by the framework in the request settings.
 *
 *     AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain,
 *         frame duration, and flash.
 *
 *     AE_MODE_ON: Standard autoexposure, with flash control disabled. User may
 *         set flash to fire or to torch mode.
 *
 *     AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's
 *         discretion for precapture and still capture. User control of flash
 *         disabled.
 *
 *     AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired
 *         for capture, and at HAL's discretion for precapture.. User control of
 *         flash disabled.
 *
 *     AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at
 *         HAL's discretion for precapture and still capture. Use a flash burst
 *         at end of precapture sequence to reduce redeye in the final
 *         picture. User control of flash disabled.
 *
 *   ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE
 *       algorithm state, reported by the HAL in the result metadata.
 *
 *     AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is
 *         opened, it must start in this state.
 *
 *     AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting
 *         exposure parameters.
 *
 *     AE_STATE_CONVERGED: AE has found good exposure values for the current
 *         scene, and the exposure parameters are not changing. HAL may
 *         spontaneously leave this state to search for better solution.
 *
 *     AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure
 *         values are not changing.
 *
 *     AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes
 *         flash is required for a sufficiently bright picture. Used for
 *         determining if a zero-shutter-lag frame can be used.
 *
 *     AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture
 *         sequence. Depending on AE mode, this mode may involve firing the
 *         flash for metering, or a burst of flash pulses for redeye reduction.
 *
 *   ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering
 *       sequence before capturing a high-quality image. Set by the framework in
 *       the request settings.
 *
 *      PRECAPTURE_TRIGGER_IDLE: No current trigger.
 *
 *      PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should
 *         use the subsequent requests to measure good exposure/white balance
 *         for an upcoming high-resolution capture.
 *
 *  Additional metadata entries:
 *
 *   ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current
 *       values
 *
 *   ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE
 *       algorithm target brightness point.
 *
 *   ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame
 *       rate range for the AE algorithm. The AE routine cannot change the frame
 *       rate to be outside these bounds.
 *
 *   ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV
 *       that should be used to determine good exposure levels. This applies to
 *       all AE modes besides OFF.
 *
 * S4.3. Auto-whitebalance settings and result entries:
 *
 *  Main metadata entries:
 *
 *   ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance
 *       mode.
 *
 *     AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix.
 *
 *     AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color
 *        transform, possibly using more complex transforms than a simple
 *        matrix.
 *
 *     AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor
 *        incandescent (tungsten) lighting, roughly 2700K.
 *
 *     AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent
 *        lighting, roughly 5000K.
 *
 *     AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for
 *        fluorescent lighting, roughly 3000K.
 *
 *     AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight,
 *        roughly 5500K.
 *
 *     AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded
 *        daylight, roughly 6500K.
 *
 *     AWB_MODE_TWILIGHT: Fixed white balance settings good for
 *        near-sunset/sunrise, roughly 15000K.
 *
 *     AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly
 *        lit by the sun, roughly 7500K.
 *
 *   ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB
 *       algorithm state, reported by the HAL in the result metadata.
 *
 *     AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device
 *         is opened, it must start in this state.
 *
 *     AWB_STATE_SEARCHING: AWB is not converged to a good value, and is
 *         changing color adjustment parameters.
 *
 *     AWB_STATE_CONVERGED: AWB has found good color adjustment values for the
 *         current scene, and the parameters are not changing. HAL may
 *         spontaneously leave this state to search for better solution.
 *
 *     AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color
 *         adjustment values are not changing.
 *
 *  Additional metadata entries:
 *
 *   ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to
 *       their current values.
 *
 *   ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV
 *       that should be used to determine good color balance. This applies only
 *       to auto-WB mode.
 *
 * S4.4. General state machine transition notes
 *
 *   Switching between AF, AE, or AWB modes always resets the algorithm's state
 *   to INACTIVE.  Similarly, switching between CONTROL_MODE or
 *   CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the
 *   algorithm states to INACTIVE.
 *
 *   The tables below are per-mode.
 *
 * S4.5. AF state machines
 *
 *                       when enabling AF or changing AF mode
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| Any                | AF mode change| INACTIVE           |                  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AF_MODE_OFF or AF_MODE_EDOF
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           |               | INACTIVE           | Never changes    |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AF_MODE_AUTO or AF_MODE_MACRO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AF_TRIGGER    | ACTIVE_SCAN        | Start AF sweep   |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF sweep done | FOCUSED_LOCKED     | If AF successful |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF sweep done | NOT_FOCUSED_LOCKED | If AF successful |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| All states         | mode change   | INACTIVE           |                  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AF_MODE_CONTINUOUS_VIDEO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
 *|                    |               |                    | if focus is good |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
 *|                    |               |                    | if focus is bad  |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
 *|                    |               |                    | position         |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AF_MODE_CONTINUOUS_PICTURE
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Eventual trans.  |
 *|                    |               |                    | once focus good  |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Eventual trans.  |
 *|                    |               |                    | if cannot focus  |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
 *|                    |               |                    | position         |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
 *+--------------------+---------------+--------------------+------------------+
 *
 * S4.6. AE and AWB state machines
 *
 *   The AE and AWB state machines are mostly identical. AE has additional
 *   FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two
 *   states should be ignored for the AWB state machine.
 *
 *                  when enabling AE/AWB or changing AE/AWB mode
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| Any                |  mode change  | INACTIVE           |                  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AE_MODE_OFF / AWB mode not AUTO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           |               | INACTIVE           | AE/AWB disabled  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                            mode = AE_MODE_ON_* / AWB_MODE_AUTO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | HAL initiates | SEARCHING          |                  |
 *|                    | AE/AWB scan   |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AE/AWB_LOCK   | LOCKED             | values locked    |
 *|                    | on            |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| SEARCHING          | HAL finishes  | CONVERGED          | good values, not |
 *|                    | AE/AWB scan   |                    | changing         |
 *+--------------------+---------------+--------------------+------------------+
 *| SEARCHING          | HAL finishes  | FLASH_REQUIRED     | converged but too|
 *|                    | AE scan       |                    | dark w/o flash   |
 *+--------------------+---------------+--------------------+------------------+
 *| SEARCHING          | AE/AWB_LOCK   | LOCKED             | values locked    |
 *|                    | on            |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| CONVERGED          | HAL initiates | SEARCHING          | values locked    |
 *|                    | AE/AWB scan   |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| CONVERGED          | AE/AWB_LOCK   | LOCKED             | values locked    |
 *|                    | on            |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| FLASH_REQUIRED     | HAL initiates | SEARCHING          | values locked    |
 *|                    | AE/AWB scan   |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| FLASH_REQUIRED     | AE/AWB_LOCK   | LOCKED             | values locked    |
 *|                    | on            |                    |                  |
 *+--------------------+---------------+--------------------+------------------+
 *| LOCKED             | AE/AWB_LOCK   | SEARCHING          | values not good  |
 *|                    | off           |                    | after unlock     |
 *+--------------------+---------------+--------------------+------------------+
 *| LOCKED             | AE/AWB_LOCK   | CONVERGED          | values good      |
 *|                    | off           |                    | after unlock     |
 *+--------------------+---------------+--------------------+------------------+
 *| LOCKED             | AE_LOCK       | FLASH_REQUIRED     | exposure good,   |
 *|                    | off           |                    | but too dark     |
 *+--------------------+---------------+--------------------+------------------+
 *| All AE states      | PRECAPTURE_   | PRECAPTURE         | Start precapture |
 *|                    | START         |                    | sequence         |
 *+--------------------+---------------+--------------------+------------------+
 *| PRECAPTURE         | Sequence done.| CONVERGED          | Ready for high-  |
 *|                    | AE_LOCK off   |                    | quality capture  |
 *+--------------------+---------------+--------------------+------------------+
 *| PRECAPTURE         | Sequence done.| LOCKED             | Ready for high-  |
 *|                    | AE_LOCK on    |                    | quality capture  |
 *+--------------------+---------------+--------------------+------------------+
 *
 */

/**
 * S5. Cropping:
 *
 * Cropping of the full pixel array (for digital zoom and other use cases where
 * a smaller FOV is desirable) is communicated through the
 * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can
 * change on a per-request basis, which is critical for implementing smooth
 * digital zoom.
 *
 * The region is defined as a rectangle (x, y, width, height), with (x, y)
 * describing the top-left corner of the rectangle. The rectangle is defined on
 * the coordinate system of the sensor active pixel array, with (0,0) being the
 * top-left pixel of the active pixel array. Therefore, the width and height
 * cannot be larger than the dimensions reported in the
 * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed
 * width and height are reported by the HAL through the
 * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the
 * maximum supported zoom factor. Therefore, the minimum crop region width and
 * height are:
 *
 * {width, height} =
 *    { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
 *        ANDROID_SCALER_MAX_DIGITAL_ZOOM),
 *      floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
 *        ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
 *
 * If the crop region needs to fulfill specific requirements (for example, it
 * needs to start on even coordinates, and its width/height needs to be even),
 * the HAL must do the necessary rounding and write out the final crop region
 * used in the output result metadata. Similarly, if the HAL implements video
 * stabilization, it must adjust the result crop region to describe the region
 * actually included in the output after video stabilization is applied. In
 * general, a camera-using application must be able to determine the field of
 * view it is receiving based on the crop region, the dimensions of the image
 * sensor, and the lens focal length.
 *
 * It is assumed that the cropping is applied after raw to other color space
 * conversion. Raw streams (RAW16 and RAW_OPAQUE) don't have this conversion stage,
 * and are not croppable. Therefore, the crop region must be ignored by the HAL
 * for raw streams.
 *
 * Since the crop region applies to all non-raw streams, which may have different aspect
 * ratios than the crop region, the exact sensor region used for each stream may
 * be smaller than the crop region. Specifically, each stream should maintain
 * square pixels and its aspect ratio by minimally further cropping the defined
 * crop region. If the stream's aspect ratio is wider than the crop region, the
 * stream should be further cropped vertically, and if the stream's aspect ratio
 * is narrower than the crop region, the stream should be further cropped
 * horizontally.
 *
 * In all cases, the stream crop must be centered within the full crop region,
 * and each stream is only either cropped horizontally or vertical relative to
 * the full crop region, never both.
 *
 * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a
 * 1280x720 stream (16:9 aspect), below demonstrates the expected output regions
 * for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x
 * 1500 pixel array) sensor.
 *
 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
 *
 *   640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)
 *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
 *
 * 0                   1000               2000
 * +---------+---------+---------+----------+
 * | Active pixel array                     |
 * |                                        |
 * |                                        |
 * +         +-------------------+          + 375
 * |         |                   |          |
 * |         O===================O          |
 * |         I 1280x720 stream   I          |
 * +         I                   I          + 750
 * |         I                   I          |
 * |         O===================O          |
 * |         |                   |          |
 * +         +-------------------+          + 1125
 * |          Crop region, 640x480 stream   |
 * |                                        |
 * |                                        |
 * +---------+---------+---------+----------+ 1500
 *
 * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)
 *
 *   640x480 stream crop: (666, 375, 1000, 750) (marked with =)
 *   1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region)
 *
 * 0                   1000               2000
 * +---------+---------+---------+----------+
 * | Active pixel array                     |
 * |                                        |
 * |                                        |
 * +         +---O==================O---+   + 375
 * |         |   I 640x480 stream   I   |   |
 * |         |   I                  I   |   |
 * |         |   I                  I   |   |
 * +         |   I                  I   |   + 750
 * |         |   I                  I   |   |
 * |         |   I                  I   |   |
 * |         |   I                  I   |   |
 * +         +---O==================O---+   + 1125
 * |          Crop region, 1280x720 stream  |
 * |                                        |
 * |                                        |
 * +---------+---------+---------+----------+ 1500
 *
 * Crop region: (500, 375, 750, 750) (1:1 aspect ratio)
 *
 *   640x480 stream crop: (500, 469, 750, 562) (marked with =)
 *   1280x720 stream crop: (500, 543, 750, 414) (marged with #)
 *
 * 0                   1000               2000
 * +---------+---------+---------+----------+
 * | Active pixel array                     |
 * |                                        |
 * |                                        |
 * +         +--------------+               + 375
 * |         O==============O               |
 * |         ################               |
 * |         #              #               |
 * +         #              #               + 750
 * |         #              #               |
 * |         ################ 1280x720      |
 * |         O==============O 640x480       |
 * +         +--------------+               + 1125
 * |          Crop region                   |
 * |                                        |
 * |                                        |
 * +---------+---------+---------+----------+ 1500
 *
 * And a final example, a 1024x1024 square aspect ratio stream instead of the
 * 480p stream:
 *
 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
 *
 *   1024x1024 stream crop: (625, 375, 750, 750) (marked with #)
 *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
 *
 * 0                   1000               2000
 * +---------+---------+---------+----------+
 * | Active pixel array                     |
 * |                                        |
 * |              1024x1024 stream          |
 * +         +--###############--+          + 375
 * |         |  #             #  |          |
 * |         O===================O          |
 * |         I 1280x720 stream   I          |
 * +         I                   I          + 750
 * |         I                   I          |
 * |         O===================O          |
 * |         |  #             #  |          |
 * +         +--###############--+          + 1125
 * |          Crop region                   |
 * |                                        |
 * |                                        |
 * +---------+---------+---------+----------+ 1500
 *
 */

/**
 * S6. Error management:
 *
 * Camera HAL device ops functions that have a return value will all return
 * -ENODEV / NULL in case of a serious error. This means the device cannot
 * continue operation, and must be closed by the framework. Once this error is
 * returned by some method, or if notify() is called with ERROR_DEVICE, only
 * the close() method can be called successfully. All other methods will return
 * -ENODEV / NULL.
 *
 * If a device op is called in the wrong sequence, for example if the framework
 * calls configure_streams() is called before initialize(), the device must
 * return -ENOSYS from the call, and do nothing.
 *
 * Transient errors in image capture must be reported through notify() as follows:
 *
 * - The failure of an entire capture to occur must be reported by the HAL by
 *   calling notify() with ERROR_REQUEST. Individual errors for the result
 *   metadata or the output buffers must not be reported in this case.
 *
 * - If the metadata for a capture cannot be produced, but some image buffers
 *   were filled, the HAL must call notify() with ERROR_RESULT.
 *
 * - If an output image buffer could not be filled, but either the metadata was
 *   produced or some other buffers were filled, the HAL must call notify() with
 *   ERROR_BUFFER for each failed buffer.
 *
 * In each of these transient failure cases, the HAL must still call
 * process_capture_result, with valid output and input (if an input buffer was
 * submitted) buffer_handle_t. If the result metadata could not be produced, it
 * should be NULL. If some buffers could not be filled, they must be returned with
 * process_capture_result in the error state, their release fences must be set to
 * the acquire fences passed by the framework, or -1 if they have been waited on by
 * the HAL already.
 *
 * Invalid input arguments result in -EINVAL from the appropriate methods. In
 * that case, the framework must act as if that call had never been made.
 *
 */

/**
 * S7. Key Performance Indicator (KPI) glossary:
 *
 * This includes some critical definitions that are used by KPI metrics.
 *
 * Pipeline Latency:
 *  For a given capture request, the duration from the framework calling
 *  process_capture_request to the HAL sending capture result and all buffers
 *  back by process_capture_result call. To make the Pipeline Latency measure
 *  independent of frame rate, it is measured by frame count.
 *
 *  For example, when frame rate is 30 (fps), the frame duration (time interval
 *  between adjacent frame capture time) is 33 (ms).
 *  If it takes 5 frames for framework to get the result and buffers back for
 *  a given request, then the Pipeline Latency is 5 (frames), instead of
 *  5 x 33 = 165 (ms).
 *
 *  The Pipeline Latency is determined by android.request.pipelineDepth and
 *  android.request.pipelineMaxDepth, see their definitions for more details.
 *
 */

/**
 * S8. Sample Use Cases:
 *
 * This includes some typical use case examples the camera HAL may support.
 *
 * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream.
 *
 *   For this use case, the bidirectional stream will be used by the framework as follows:
 *
 *   1. The framework includes a buffer from this stream as output buffer in a
 *      request as normal.
 *
 *   2. Once the HAL device returns a filled output buffer to the framework,
 *      the framework may do one of two things with the filled buffer:
 *
 *   2. a. The framework uses the filled data, and returns the now-used buffer
 *         to the stream queue for reuse. This behavior exactly matches the
 *         OUTPUT type of stream.
 *
 *   2. b. The framework wants to reprocess the filled data, and uses the
 *         buffer as an input buffer for a request. Once the HAL device has
 *         used the reprocessing buffer, it then returns it to the
 *         framework. The framework then returns the now-used buffer to the
 *         stream queue for reuse.
 *
 *   3. The HAL device will be given the buffer again as an output buffer for
 *        a request at some future point.
 *
 *   For ZSL use case, the pixel format for bidirectional stream will be
 *   HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it
 *   is listed in android.scaler.availableInputOutputFormatsMap. When
 *   HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc
 *   usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL.
 *   A configuration stream list that has BIDIRECTIONAL stream used as input, will
 *   usually also have a distinct OUTPUT stream to get the reprocessing data. For example,
 *   for the ZSL use case, the stream list might be configured with the following:
 *
 *     - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used
 *       as input.
 *     - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream.
 *
 */

/**
 *   S9. Notes on Controls and Metadata
 *
 *   This section contains notes about the interpretation and usage of various metadata tags.
 *
 *   S9.1 HIGH_QUALITY and FAST modes.
 *
 *   Many camera post-processing blocks may be listed as having HIGH_QUALITY,
 *   FAST, and OFF operating modes. These blocks will typically also have an
 *   'available modes' tag representing which of these operating modes are
 *   available on a given device. The general policy regarding implementing
 *   these modes is as follows:
 *
 *   1. Operating mode controls of hardware blocks that cannot be disabled
 *      must not list OFF in their corresponding 'available modes' tags.
 *
 *   2. OFF will always be included in their corresponding 'available modes'
 *      tag if it is possible to disable that hardware block.
 *
 *   3. FAST must always be included in the 'available modes' tags for all
 *      post-processing blocks supported on the device.  If a post-processing
 *      block also has a slower and higher quality operating mode that does
 *      not meet the framerate requirements for FAST mode, HIGH_QUALITY should
 *      be included in the 'available modes' tag to represent this operating
 *      mode.
 */
__BEGIN_DECLS

struct camera3_device;

/**********************************************************************
 *
 * Camera3 stream and stream buffer definitions.
 *
 * These structs and enums define the handles and contents of the input and
 * output streams connecting the HAL to various framework and application buffer
 * consumers. Each stream is backed by a gralloc buffer queue.
 *
 */

/**
 * camera3_stream_type_t:
 *
 * The type of the camera stream, which defines whether the camera HAL device is
 * the producer or the consumer for that stream, and how the buffers of the
 * stream relate to the other streams.
 */
typedef enum camera3_stream_type {
    /**
     * This stream is an output stream; the camera HAL device will be
     * responsible for filling buffers from this stream with newly captured or
     * reprocessed image data.
     */
    CAMERA3_STREAM_OUTPUT = 0,

    /**
     * This stream is an input stream; the camera HAL device will be responsible
     * for reading buffers from this stream and sending them through the camera
     * processing pipeline, as if the buffer was a newly captured image from the
     * imager.
     *
     * The pixel format for input stream can be any format reported by
     * android.scaler.availableInputOutputFormatsMap. The pixel format of the
     * output stream that is used to produce the reprocessing data may be any
     * format reported by android.scaler.availableStreamConfigurations. The
     * supported input/output stream combinations depends the camera device
     * capabilities, see android.scaler.availableInputOutputFormatsMap for
     * stream map details.
     *
     * This kind of stream is generally used to reprocess data into higher
     * quality images (that otherwise would cause a frame rate performance
     * loss), or to do off-line reprocessing.
     *
     */
    CAMERA3_STREAM_INPUT = 1,

    /**
     * This stream can be used for input and output. Typically, the stream is
     * used as an output stream, but occasionally one already-filled buffer may
     * be sent back to the HAL device for reprocessing.
     *
     * This kind of stream is meant generally for Zero Shutter Lag (ZSL)
     * features, where copying the captured image from the output buffer to the
     * reprocessing input buffer would be expensive. See S8.1 for more details.
     *
     * Note that the HAL will always be reprocessing data it produced.
     *
     */
    CAMERA3_STREAM_BIDIRECTIONAL = 2,

    /**
     * Total number of framework-defined stream types
     */
    CAMERA3_NUM_STREAM_TYPES

} camera3_stream_type_t;

/**
 * camera3_stream_t:
 *
 * A handle to a single camera input or output stream. A stream is defined by
 * the framework by its buffer resolution and format, and additionally by the
 * HAL with the gralloc usage flags and the maximum in-flight buffer count.
 *
 * The stream structures are owned by the framework, but pointers to a
 * camera3_stream passed into the HAL by configure_streams() are valid until the
 * end of the first subsequent configure_streams() call that _does not_ include
 * that camera3_stream as an argument, or until the end of the close() call.
 *
 * All camera3_stream framework-controlled members are immutable once the
 * camera3_stream is passed into configure_streams().  The HAL may only change
 * the HAL-controlled parameters during a configure_streams() call, except for
 * the contents of the private pointer.
 *
 * If a configure_streams() call returns a non-fatal error, all active streams
 * remain valid as if configure_streams() had not been called.
 *
 * The endpoint of the stream is not visible to the camera HAL device.
 * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags
 * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream
 * types) see the usage field below.
 */
typedef struct camera3_stream {

    /*****
     * Set by framework before configure_streams()
     */

    /**
     * The type of the stream, one of the camera3_stream_type_t values.
     */
    int stream_type;

    /**
     * The width in pixels of the buffers in this stream
     */
    uint32_t width;

    /**
     * The height in pixels of the buffers in this stream
     */
    uint32_t height;

    /**
     * The pixel format for the buffers in this stream. Format is a value from
     * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or
     * from device-specific headers.
     *
     * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
     * gralloc module will select a format based on the usage flags provided by
     * the camera device and the other endpoint of the stream.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * The camera HAL device must inspect the buffers handed to it in the
     * subsequent register_stream_buffers() call to obtain the
     * implementation-specific format details, if necessary.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * register_stream_buffers() won't be called by the framework, so the HAL
     * should configure the ISP and sensor pipeline based purely on the sizes,
     * usage flags, and formats for the configured streams.
     */
    int format;

    /*****
     * Set by HAL during configure_streams().
     */

    /**
     * The gralloc usage flags for this stream, as needed by the HAL. The usage
     * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific
     * headers.
     *
     * For output streams, these are the HAL's producer usage flags. For input
     * streams, these are the HAL's consumer usage flags. The usage flags from
     * the producer and the consumer will be combined together and then passed
     * to the platform gralloc HAL module for allocating the gralloc buffers for
     * each stream.
     *
     * Version information:
     *
     * == CAMERA_DEVICE_API_VERSION_3_0:
     *
     *   No initial value guaranteed when passed via configure_streams().
     *   HAL may not use this field as input, and must write over this field
     *   with its usage flags.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_1:
     *
     *   For stream_type OUTPUT and BIDIRECTIONAL, when passed via
     *   configure_streams(), the initial value of this is the consumer's
     *   usage flags.  The HAL may use these consumer flags to decide stream
     *   configuration.
     *   For stream_type INPUT, when passed via configure_streams(), the initial
     *   value of this is 0.
     *   For all streams passed via configure_streams(), the HAL must write
     *   over this field with its usage flags.
     */
    uint32_t usage;

    /**
     * The maximum number of buffers the HAL device may need to have dequeued at
     * the same time. The HAL device may not have more buffers in-flight from
     * this stream than this value.
     */
    uint32_t max_buffers;

    /**
     * A handle to HAL-private information for the stream. Will not be inspected
     * by the framework code.
     */
    void *priv;

} camera3_stream_t;

/**
 * camera3_stream_configuration_t:
 *
 * A structure of stream definitions, used by configure_streams(). This
 * structure defines all the output streams and the reprocessing input
 * stream for the current camera use case.
 */
typedef struct camera3_stream_configuration {
    /**
     * The total number of streams requested by the framework.  This includes
     * both input and output streams. The number of streams will be at least 1,
     * and there will be at least one output-capable stream.
     */
    uint32_t num_streams;

    /**
     * An array of camera stream pointers, defining the input/output
     * configuration for the camera HAL device.
     *
     * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)
     * in a single configuration.
     *
     * At least one output-capable stream must be defined (OUTPUT or
     * BIDIRECTIONAL).
     */
    camera3_stream_t **streams;

} camera3_stream_configuration_t;

/**
 * camera3_buffer_status_t:
 *
 * The current status of a single stream buffer.
 */
typedef enum camera3_buffer_status {
    /**
     * The buffer is in a normal state, and can be used after waiting on its
     * sync fence.
     */
    CAMERA3_BUFFER_STATUS_OK = 0,

    /**
     * The buffer does not contain valid data, and the data in it should not be
     * used. The sync fence must still be waited on before reusing the buffer.
     */
    CAMERA3_BUFFER_STATUS_ERROR = 1

} camera3_buffer_status_t;

/**
 * camera3_stream_buffer_t:
 *
 * A single buffer from a camera3 stream. It includes a handle to its parent
 * stream, the handle to the gralloc buffer itself, and sync fences
 *
 * The buffer does not specify whether it is to be used for input or output;
 * that is determined by its parent stream type and how the buffer is passed to
 * the HAL device.
 */
typedef struct camera3_stream_buffer {
    /**
     * The handle of the stream this buffer is associated with
     */
    camera3_stream_t *stream;

    /**
     * The native handle to the buffer
     */
    buffer_handle_t *buffer;

    /**
     * Current state of the buffer, one of the camera3_buffer_status_t
     * values. The framework will not pass buffers to the HAL that are in an
     * error state. In case a buffer could not be filled by the HAL, it must
     * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the
     * framework with process_capture_result().
     */
    int status;

    /**
     * The acquire sync fence for this buffer. The HAL must wait on this fence
     * fd before attempting to read from or write to this buffer.
     *
     * The framework may be set to -1 to indicate that no waiting is necessary
     * for this buffer.
     *
     * When the HAL returns an output buffer to the framework with
     * process_capture_result(), the acquire_fence must be set to -1. If the HAL
     * never waits on the acquire_fence due to an error in filling a buffer,
     * when calling process_capture_result() the HAL must set the release_fence
     * of the buffer to be the acquire_fence passed to it by the framework. This
     * will allow the framework to wait on the fence before reusing the buffer.
     *
     * For input buffers, the HAL must not change the acquire_fence field during
     * the process_capture_request() call.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * When the HAL returns an input buffer to the framework with
     * process_capture_result(), the acquire_fence must be set to -1. If the HAL
     * never waits on input buffer acquire fence due to an error, the sync
     * fences should be handled similarly to the way they are handled for output
     * buffers.
     */
     int acquire_fence;

    /**
     * The release sync fence for this buffer. The HAL must set this fence when
     * returning buffers to the framework, or write -1 to indicate that no
     * waiting is required for this buffer.
     *
     * For the output buffers, the fences must be set in the output_buffers
     * array passed to process_capture_result().
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * For the input buffer, the release fence must be set by the
     * process_capture_request() call.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * For the input buffer, the fences must be set in the input_buffer
     * passed to process_capture_result().
     *
     * After signaling the release_fence for this buffer, the HAL
     * should not make any further attempts to access this buffer as the
     * ownership has been fully transferred back to the framework.
     *
     * If a fence of -1 was specified then the ownership of this buffer
     * is transferred back immediately upon the call of process_capture_result.
     */
    int release_fence;

} camera3_stream_buffer_t;

/**
 * camera3_stream_buffer_set_t:
 *
 * The complete set of gralloc buffers for a stream. This structure is given to
 * register_stream_buffers() to allow the camera HAL device to register/map/etc
 * newly allocated stream buffers.
 *
 * >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 * Deprecated (and not used). In particular,
 * register_stream_buffers is also deprecated and will never be invoked.
 *
 */
typedef struct camera3_stream_buffer_set {
    /**
     * The stream handle for the stream these buffers belong to
     */
    camera3_stream_t *stream;

    /**
     * The number of buffers in this stream. It is guaranteed to be at least
     * stream->max_buffers.
     */
    uint32_t num_buffers;

    /**
     * The array of gralloc buffer handles for this stream. If the stream format
     * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device
     * should inspect the passed-in buffers to determine any platform-private
     * pixel format information.
     */
    buffer_handle_t **buffers;

} camera3_stream_buffer_set_t;

/**
 * camera3_jpeg_blob:
 *
 * Transport header for compressed JPEG buffers in output streams.
 *
 * To capture JPEG images, a stream is created using the pixel format
 * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the
 * framework, based on the static metadata field android.jpeg.maxSize. Since
 * compressed JPEG images are of variable size, the HAL needs to include the
 * final size of the compressed image using this structure inside the output
 * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID.
 *
 * Transport header should be at the end of the JPEG output stream buffer. That
 * means the jpeg_blob_id must start at byte[buffer_size -
 * sizeof(camera3_jpeg_blob)], where the buffer_size is the size of gralloc buffer.
 * Any HAL using this transport header must account for it in android.jpeg.maxSize
 * The JPEG data itself starts at the beginning of the buffer and should be
 * jpeg_size bytes long.
 */
typedef struct camera3_jpeg_blob {
    uint16_t jpeg_blob_id;
    uint32_t jpeg_size;
} camera3_jpeg_blob_t;

enum {
    CAMERA3_JPEG_BLOB_ID = 0x00FF
};

/**********************************************************************
 *
 * Message definitions for the HAL notify() callback.
 *
 * These definitions are used for the HAL notify callback, to signal
 * asynchronous events from the HAL device to the Android framework.
 *
 */

/**
 * camera3_msg_type:
 *
 * Indicates the type of message sent, which specifies which member of the
 * message union is valid.
 *
 */
typedef enum camera3_msg_type {
    /**
     * An error has occurred. camera3_notify_msg.message.error contains the
     * error information.
     */
    CAMERA3_MSG_ERROR = 1,

    /**
     * The exposure of a given request has
     * begun. camera3_notify_msg.message.shutter contains the information
     * the capture.
     */
    CAMERA3_MSG_SHUTTER = 2,

    /**
     * Number of framework message types
     */
    CAMERA3_NUM_MESSAGES

} camera3_msg_type_t;

/**
 * Defined error codes for CAMERA_MSG_ERROR
 */
typedef enum camera3_error_msg_code {
    /**
     * A serious failure occured. No further frames or buffer streams will
     * be produced by the device. Device should be treated as closed. The
     * client must reopen the device to use it again. The frame_number field
     * is unused.
     */
    CAMERA3_MSG_ERROR_DEVICE = 1,

    /**
     * An error has occurred in processing a request. No output (metadata or
     * buffers) will be produced for this request. The frame_number field
     * specifies which request has been dropped. Subsequent requests are
     * unaffected, and the device remains operational.
     */
    CAMERA3_MSG_ERROR_REQUEST = 2,

    /**
     * An error has occurred in producing an output result metadata buffer
     * for a request, but output stream buffers for it will still be
     * available. Subsequent requests are unaffected, and the device remains
     * operational.  The frame_number field specifies the request for which
     * result metadata won't be available.
     */
    CAMERA3_MSG_ERROR_RESULT = 3,

    /**
     * An error has occurred in placing an output buffer into a stream for a
     * request. The frame metadata and other buffers may still be
     * available. Subsequent requests are unaffected, and the device remains
     * operational. The frame_number field specifies the request for which the
     * buffer was dropped, and error_stream contains a pointer to the stream
     * that dropped the frame.u
     */
    CAMERA3_MSG_ERROR_BUFFER = 4,

    /**
     * Number of error types
     */
    CAMERA3_MSG_NUM_ERRORS

} camera3_error_msg_code_t;

/**
 * camera3_error_msg_t:
 *
 * Message contents for CAMERA3_MSG_ERROR
 */
typedef struct camera3_error_msg {
    /**
     * Frame number of the request the error applies to. 0 if the frame number
     * isn't applicable to the error.
     */
    uint32_t frame_number;

    /**
     * Pointer to the stream that had a failure. NULL if the stream isn't
     * applicable to the error.
     */
    camera3_stream_t *error_stream;

    /**
     * The code for this error; one of the CAMERA_MSG_ERROR enum values.
     */
    int error_code;

} camera3_error_msg_t;

/**
 * camera3_shutter_msg_t:
 *
 * Message contents for CAMERA3_MSG_SHUTTER
 */
typedef struct camera3_shutter_msg {
    /**
     * Frame number of the request that has begun exposure
     */
    uint32_t frame_number;

    /**
     * Timestamp for the start of capture. This must match the capture result
     * metadata's sensor exposure start timestamp.
     */
    uint64_t timestamp;

} camera3_shutter_msg_t;

/**
 * camera3_notify_msg_t:
 *
 * The message structure sent to camera3_callback_ops_t.notify()
 */
typedef struct camera3_notify_msg {

    /**
     * The message type. One of camera3_notify_msg_type, or a private extension.
     */
    int type;

    union {
        /**
         * Error message contents. Valid if type is CAMERA3_MSG_ERROR
         */
        camera3_error_msg_t error;

        /**
         * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER
         */
        camera3_shutter_msg_t shutter;

        /**
         * Generic message contents. Used to ensure a minimum size for custom
         * message types.
         */
        uint8_t generic[32];
    } message;

} camera3_notify_msg_t;

/**********************************************************************
 *
 * Capture request/result definitions for the HAL process_capture_request()
 * method, and the process_capture_result() callback.
 *
 */

/**
 * camera3_request_template_t:
 *
 * Available template types for
 * camera3_device_ops.construct_default_request_settings()
 */
typedef enum camera3_request_template {
    /**
     * Standard camera preview operation with 3A on auto.
     */
    CAMERA3_TEMPLATE_PREVIEW = 1,

    /**
     * Standard camera high-quality still capture with 3A and flash on auto.
     */
    CAMERA3_TEMPLATE_STILL_CAPTURE = 2,

    /**
     * Standard video recording plus preview with 3A on auto, torch off.
     */
    CAMERA3_TEMPLATE_VIDEO_RECORD = 3,

    /**
     * High-quality still capture while recording video. Application will
     * include preview, video record, and full-resolution YUV or JPEG streams in
     * request. Must not cause stuttering on video stream. 3A on auto.
     */
    CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4,

    /**
     * Zero-shutter-lag mode. Application will request preview and
     * full-resolution data for each frame, and reprocess it to JPEG when a
     * still image is requested by user. Settings should provide highest-quality
     * full-resolution images without compromising preview frame rate. 3A on
     * auto.
     */
    CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5,

    /**
     * A basic template for direct application control of capture
     * parameters. All automatic control is disabled (auto-exposure, auto-white
     * balance, auto-focus), and post-processing parameters are set to preview
     * quality. The manual capture parameters (exposure, sensitivity, etc.)
     * are set to reasonable defaults, but should be overridden by the
     * application depending on the intended use case.
     */
    CAMERA3_TEMPLATE_MANUAL = 6,

    /* Total number of templates */
    CAMERA3_TEMPLATE_COUNT,

    /**
     * First value for vendor-defined request templates
     */
    CAMERA3_VENDOR_TEMPLATE_START = 0x40000000

} camera3_request_template_t;

/**
 * camera3_capture_request_t:
 *
 * A single request for image capture/buffer reprocessing, sent to the Camera
 * HAL device by the framework in process_capture_request().
 *
 * The request contains the settings to be used for this capture, and the set of
 * output buffers to write the resulting image data in. It may optionally
 * contain an input buffer, in which case the request is for reprocessing that
 * input buffer instead of capturing a new image with the camera sensor. The
 * capture is identified by the frame_number.
 *
 * In response, the camera HAL device must send a camera3_capture_result
 * structure asynchronously to the framework, using the process_capture_result()
 * callback.
 */
typedef struct camera3_capture_request {
    /**
     * The frame number is an incrementing integer set by the framework to
     * uniquely identify this capture. It needs to be returned in the result
     * call, and is also used to identify the request in asynchronous
     * notifications sent to camera3_callback_ops_t.notify().
     */
    uint32_t frame_number;

    /**
     * The settings buffer contains the capture and processing parameters for
     * the request. As a special case, a NULL settings buffer indicates that the
     * settings are identical to the most-recently submitted capture request. A
     * NULL buffer cannot be used as the first submitted request after a
     * configure_streams() call.
     */
    const camera_metadata_t *settings;

    /**
     * The input stream buffer to use for this request, if any.
     *
     * If input_buffer is NULL, then the request is for a new capture from the
     * imager. If input_buffer is valid, the request is for reprocessing the
     * image contained in input_buffer.
     *
     * In the latter case, the HAL must set the release_fence of the
     * input_buffer to a valid sync fence, or to -1 if the HAL does not support
     * sync, before process_capture_request() returns.
     *
     * The HAL is required to wait on the acquire sync fence of the input buffer
     * before accessing it.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * Any input buffer included here will have been registered with the HAL
     * through register_stream_buffers() before its inclusion in a request.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * The buffers will not have been pre-registered with the HAL.
     * Subsequent requests may reuse buffers, or provide entirely new buffers.
     */
    camera3_stream_buffer_t *input_buffer;

    /**
     * The number of output buffers for this capture request. Must be at least
     * 1.
     */
    uint32_t num_output_buffers;

    /**
     * An array of num_output_buffers stream buffers, to be filled with image
     * data from this capture/reprocess. The HAL must wait on the acquire fences
     * of each stream buffer before writing to them.
     *
     * The HAL takes ownership of the actual buffer_handle_t entries in
     * output_buffers; the framework does not access them until they are
     * returned in a camera3_capture_result_t.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * All the buffers included  here will have been registered with the HAL
     * through register_stream_buffers() before their inclusion in a request.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Any or all of the buffers included here may be brand new in this
     * request (having never before seen by the HAL).
     */
    const camera3_stream_buffer_t *output_buffers;

} camera3_capture_request_t;

/**
 * camera3_capture_result_t:
 *
 * The result of a single capture/reprocess by the camera HAL device. This is
 * sent to the framework asynchronously with process_capture_result(), in
 * response to a single capture request sent to the HAL with
 * process_capture_request(). Multiple process_capture_result() calls may be
 * performed by the HAL for each request.
 *
 * Each call, all with the same frame
 * number, may contain some subset of the output buffers, and/or the result
 * metadata. The metadata may only be provided once for a given frame number;
 * all other calls must set the result metadata to NULL.
 *
 * The result structure contains the output metadata from this capture, and the
 * set of output buffers that have been/will be filled for this capture. Each
 * output buffer may come with a release sync fence that the framework will wait
 * on before reading, in case the buffer has not yet been filled by the HAL.
 *
 * >= CAMERA_DEVICE_API_VERSION_3_2:
 *
 * The metadata may be provided multiple times for a single frame number. The
 * framework will accumulate together the final result set by combining each
 * partial result together into the total result set.
 *
 * If an input buffer is given in a request, the HAL must return it in one of
 * the process_capture_result calls, and the call may be to just return the input
 * buffer, without metadata and output buffers; the sync fences must be handled
 * the same way they are done for output buffers.
 *
 *
 * Performance considerations:
 *
 * Applications will also receive these partial results immediately, so sending
 * partial results is a highly recommended performance optimization to avoid
 * the total pipeline latency before sending the results for what is known very
 * early on in the pipeline.
 *
 * A typical use case might be calculating the AF state halfway through the
 * pipeline; by sending the state back to the framework immediately, we get a
 * 50% performance increase and perceived responsiveness of the auto-focus.
 *
 */
typedef struct camera3_capture_result {
    /**
     * The frame number is an incrementing integer set by the framework in the
     * submitted request to uniquely identify this capture. It is also used to
     * identify the request in asynchronous notifications sent to
     * camera3_callback_ops_t.notify().
    */
    uint32_t frame_number;

    /**
     * The result metadata for this capture. This contains information about the
     * final capture parameters, the state of the capture and post-processing
     * hardware, the state of the 3A algorithms, if enabled, and the output of
     * any enabled statistics units.
     *
     * Only one call to process_capture_result() with a given frame_number may
     * include the result metadata. All other calls for the same frame_number
     * must set this to NULL.
     *
     * If there was an error producing the result metadata, result must be an
     * empty metadata buffer, and notify() must be called with ERROR_RESULT.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Multiple calls to process_capture_result() with a given frame_number
     * may include the result metadata.
     *
     * Partial metadata submitted should not include any metadata key returned
     * in a previous partial result for a given frame. Each new partial result
     * for that frame must also set a distinct partial_result value.
     *
     * If notify has been called with ERROR_RESULT, all further partial
     * results for that frame are ignored by the framework.
     */
    const camera_metadata_t *result;

    /**
     * The number of output buffers returned in this result structure. Must be
     * less than or equal to the matching capture request's count. If this is
     * less than the buffer count in the capture request, at least one more call
     * to process_capture_result with the same frame_number must be made, to
     * return the remaining output buffers to the framework. This may only be
     * zero if the structure includes valid result metadata or an input buffer
     * is returned in this result.
     */
    uint32_t num_output_buffers;

    /**
     * The handles for the output stream buffers for this capture. They may not
     * yet be filled at the time the HAL calls process_capture_result(); the
     * framework will wait on the release sync fences provided by the HAL before
     * reading the buffers.
     *
     * The HAL must set the stream buffer's release sync fence to a valid sync
     * fd, or to -1 if the buffer has already been filled.
     *
     * If the HAL encounters an error while processing the buffer, and the
     * buffer is not filled, the buffer's status field must be set to
     * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence
     * before encountering the error, the acquire fence should be copied into
     * the release fence, to allow the framework to wait on the fence before
     * reusing the buffer.
     *
     * The acquire fence must be set to -1 for all output buffers.  If
     * num_output_buffers is zero, this may be NULL. In that case, at least one
     * more process_capture_result call must be made by the HAL to provide the
     * output buffers.
     *
     * When process_capture_result is called with a new buffer for a frame,
     * all previous frames' buffers for that corresponding stream must have been
     * already delivered (the fences need not have yet been signaled).
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Gralloc buffers for a frame may be sent to framework before the
     * corresponding SHUTTER-notify.
     *
     * Performance considerations:
     *
     * Buffers delivered to the framework will not be dispatched to the
     * application layer until a start of exposure timestamp has been received
     * via a SHUTTER notify() call. It is highly recommended to
     * dispatch that call as early as possible.
     */
     const camera3_stream_buffer_t *output_buffers;

     /**
      * >= CAMERA_DEVICE_API_VERSION_3_2:
      *
      * The handle for the input stream buffer for this capture. It may not
      * yet be consumed at the time the HAL calls process_capture_result(); the
      * framework will wait on the release sync fences provided by the HAL before
      * reusing the buffer.
      *
      * The HAL should handle the sync fences the same way they are done for
      * output_buffers.
      *
      * Only one input buffer is allowed to be sent per request. Similarly to
      * output buffers, the ordering of returned input buffers must be
      * maintained by the HAL.
      *
      * Performance considerations:
      *
      * The input buffer should be returned as early as possible. If the HAL
      * supports sync fences, it can call process_capture_result to hand it back
      * with sync fences being set appropriately. If the sync fences are not
      * supported, the buffer can only be returned when it is consumed, which
      * may take long time; the HAL may choose to copy this input buffer to make
      * the buffer return sooner.
      */
      const camera3_stream_buffer_t *input_buffer;

     /**
      * >= CAMERA_DEVICE_API_VERSION_3_2:
      *
      * In order to take advantage of partial results, the HAL must set the
      * static metadata android.request.partialResultCount to the number of
      * partial results it will send for each frame.
      *
      * Each new capture result with a partial result must set
      * this field (partial_result) to a distinct inclusive value between
      * 1 and android.request.partialResultCount.
      *
      * HALs not wishing to take advantage of this feature must not
      * set an android.request.partialResultCount or partial_result to a value
      * other than 1.
      *
      * This value must be set to 0 when a capture result contains buffers only
      * and no metadata.
      */
     uint32_t partial_result;

} camera3_capture_result_t;

/**********************************************************************
 *
 * Callback methods for the HAL to call into the framework.
 *
 * These methods are used to return metadata and image buffers for a completed
 * or failed captures, and to notify the framework of asynchronous events such
 * as errors.
 *
 * The framework will not call back into the HAL from within these callbacks,
 * and these calls will not block for extended periods.
 *
 */
typedef struct camera3_callback_ops {

    /**
     * process_capture_result:
     *
     * Send results from a completed capture to the framework.
     * process_capture_result() may be invoked multiple times by the HAL in
     * response to a single capture request. This allows, for example, the
     * metadata and low-resolution buffers to be returned in one call, and
     * post-processed JPEG buffers in a later call, once it is available. Each
     * call must include the frame number of the request it is returning
     * metadata or buffers for.
     *
     * A component (buffer or metadata) of the complete result may only be
     * included in one process_capture_result call. A buffer for each stream,
     * and the result metadata, must be returned by the HAL for each request in
     * one of the process_capture_result calls, even in case of errors producing
     * some of the output. A call to process_capture_result() with neither
     * output buffers or result metadata is not allowed.
     *
     * The order of returning metadata and buffers for a single result does not
     * matter, but buffers for a given stream must be returned in FIFO order. So
     * the buffer for request 5 for stream A must always be returned before the
     * buffer for request 6 for stream A. This also applies to the result
     * metadata; the metadata for request 5 must be returned before the metadata
     * for request 6.
     *
     * However, different streams are independent of each other, so it is
     * acceptable and expected that the buffer for request 5 for stream A may be
     * returned after the buffer for request 6 for stream B is. And it is
     * acceptable that the result metadata for request 6 for stream B is
     * returned before the buffer for request 5 for stream A is.
     *
     * The HAL retains ownership of result structure, which only needs to be
     * valid to access during this call. The framework will copy whatever it
     * needs before this call returns.
     *
     * The output buffers do not need to be filled yet; the framework will wait
     * on the stream buffer release sync fence before reading the buffer
     * data. Therefore, this method should be called by the HAL as soon as
     * possible, even if some or all of the output buffers are still in
     * being filled. The HAL must include valid release sync fences into each
     * output_buffers stream buffer entry, or -1 if that stream buffer is
     * already filled.
     *
     * If the result buffer cannot be constructed for a request, the HAL should
     * return an empty metadata buffer, but still provide the output buffers and
     * their sync fences. In addition, notify() must be called with an
     * ERROR_RESULT message.
     *
     * If an output buffer cannot be filled, its status field must be set to
     * STATUS_ERROR. In addition, notify() must be called with a ERROR_BUFFER
     * message.
     *
     * If the entire capture has failed, then this method still needs to be
     * called to return the output buffers to the framework. All the buffer
     * statuses should be STATUS_ERROR, and the result metadata should be an
     * empty buffer. In addition, notify() must be called with a ERROR_REQUEST
     * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
     * should not be sent.
     *
     * Performance requirements:
     *
     * This is a non-blocking call. The framework will return this call in 5ms.
     *
     * The pipeline latency (see S7 for definition) should be less than or equal to
     * 4 frame intervals, and must be less than or equal to 8 frame intervals.
     *
     */
    void (*process_capture_result)(const struct camera3_callback_ops *,
            const camera3_capture_result_t *result);

    /**
     * notify:
     *
     * Asynchronous notification callback from the HAL, fired for various
     * reasons. Only for information independent of frame capture, or that
     * require specific timing. The ownership of the message structure remains
     * with the HAL, and the msg only needs to be valid for the duration of this
     * call.
     *
     * Multiple threads may call notify() simultaneously.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * The notification for the start of exposure for a given request must be
     * sent by the HAL before the first call to process_capture_result() for
     * that request is made.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Buffers delivered to the framework will not be dispatched to the
     * application layer until a start of exposure timestamp has been received
     * via a SHUTTER notify() call. It is highly recommended to
     * dispatch this call as early as possible.
     *
     * ------------------------------------------------------------------------
     * Performance requirements:
     *
     * This is a non-blocking call. The framework will return this call in 5ms.
     */
    void (*notify)(const struct camera3_callback_ops *,
            const camera3_notify_msg_t *msg);

} camera3_callback_ops_t;

/**********************************************************************
 *
 * Camera device operations
 *
 */
typedef struct camera3_device_ops {

    /**
     * initialize:
     *
     * One-time initialization to pass framework callback function pointers to
     * the HAL. Will be called once after a successful open() call, before any
     * other functions are called on the camera3_device_ops structure.
     *
     * Performance requirements:
     *
     * This should be a non-blocking call. The HAL should return from this call
     * in 5ms, and must return from this call in 10ms.
     *
     * Return values:
     *
     *  0:     On successful initialization
     *
     * -ENODEV: If initialization fails. Only close() can be called successfully
     *          by the framework after this.
     */
    int (*initialize)(const struct camera3_device *,
            const camera3_callback_ops_t *callback_ops);

    /**********************************************************************
     * Stream management
     */

    /**
     * configure_streams:
     *
     * CAMERA_DEVICE_API_VERSION_3_0 only:
     *
     * Reset the HAL camera device processing pipeline and set up new input and
     * output streams. This call replaces any existing stream configuration with
     * the streams defined in the stream_list. This method will be called at
     * least once after initialize() before a request is submitted with
     * process_capture_request().
     *
     * The stream_list must contain at least one output-capable stream, and may
     * not contain more than one input-capable stream.
     *
     * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_stream()). These
     * streams will already have valid values for usage, max_buffers, and the
     * private pointer.
     *
     * If such a stream has already had its buffers registered,
     * register_stream_buffers() will not be called again for the stream, and
     * buffers from the stream can be immediately included in input requests.
     *
     * If the HAL needs to change the stream configuration for an existing
     * stream due to the new configuration, it may rewrite the values of usage
     * and/or max_buffers during the configure call.
     *
     * The framework will detect such a change, and will then reallocate the
     * stream buffers, and call register_stream_buffers() again before using
     * buffers from that stream in a request.
     *
     * If a currently-active stream is not included in stream_list, the HAL may
     * safely remove any references to that stream. It will not be reused in a
     * later configure() call by the framework, and all the gralloc buffers for
     * it will be freed after the configure_streams() call returns.
     *
     * The stream_list structure is owned by the framework, and may not be
     * accessed once this call completes. The address of an individual
     * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_stream() call which no longer includes
     * that camera3_stream_t in the stream_list argument. The HAL may not change
     * values in the stream structure outside of the private pointer, except for
     * the usage and max_buffers members during the configure_streams() call
     * itself.
     *
     * If the stream is new, the usage, max_buffer, and private pointer fields
     * of the stream structure will all be set to 0. The HAL device must set
     * these fields before the configure_streams() call returns. These fields
     * are then used by the framework and the platform gralloc module to
     * allocate the gralloc buffers for each stream.
     *
     * Before such a new stream can have its buffers included in a capture
     * request, the framework will call register_stream_buffers() with that
     * stream. However, the framework is not required to register buffers for
     * _all_ streams before submitting a request. This allows for quick startup
     * of (for example) a preview stream, with allocation for other streams
     * happening later or concurrently.
     *
     * ------------------------------------------------------------------------
     * CAMERA_DEVICE_API_VERSION_3_1 only:
     *
     * Reset the HAL camera device processing pipeline and set up new input and
     * output streams. This call replaces any existing stream configuration with
     * the streams defined in the stream_list. This method will be called at
     * least once after initialize() before a request is submitted with
     * process_capture_request().
     *
     * The stream_list must contain at least one output-capable stream, and may
     * not contain more than one input-capable stream.
     *
     * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_stream()). These
     * streams will already have valid values for usage, max_buffers, and the
     * private pointer.
     *
     * If such a stream has already had its buffers registered,
     * register_stream_buffers() will not be called again for the stream, and
     * buffers from the stream can be immediately included in input requests.
     *
     * If the HAL needs to change the stream configuration for an existing
     * stream due to the new configuration, it may rewrite the values of usage
     * and/or max_buffers during the configure call.
     *
     * The framework will detect such a change, and will then reallocate the
     * stream buffers, and call register_stream_buffers() again before using
     * buffers from that stream in a request.
     *
     * If a currently-active stream is not included in stream_list, the HAL may
     * safely remove any references to that stream. It will not be reused in a
     * later configure() call by the framework, and all the gralloc buffers for
     * it will be freed after the configure_streams() call returns.
     *
     * The stream_list structure is owned by the framework, and may not be
     * accessed once this call completes. The address of an individual
     * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_stream() call which no longer includes
     * that camera3_stream_t in the stream_list argument. The HAL may not change
     * values in the stream structure outside of the private pointer, except for
     * the usage and max_buffers members during the configure_streams() call
     * itself.
     *
     * If the stream is new, max_buffer, and private pointer fields of the
     * stream structure will all be set to 0. The usage will be set to the
     * consumer usage flags. The HAL device must set these fields before the
     * configure_streams() call returns. These fields are then used by the
     * framework and the platform gralloc module to allocate the gralloc
     * buffers for each stream.
     *
     * Before such a new stream can have its buffers included in a capture
     * request, the framework will call register_stream_buffers() with that
     * stream. However, the framework is not required to register buffers for
     * _all_ streams before submitting a request. This allows for quick startup
     * of (for example) a preview stream, with allocation for other streams
     * happening later or concurrently.
     *
     * ------------------------------------------------------------------------
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Reset the HAL camera device processing pipeline and set up new input and
     * output streams. This call replaces any existing stream configuration with
     * the streams defined in the stream_list. This method will be called at
     * least once after initialize() before a request is submitted with
     * process_capture_request().
     *
     * The stream_list must contain at least one output-capable stream, and may
     * not contain more than one input-capable stream.
     *
     * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_stream()). These
     * streams will already have valid values for usage, max_buffers, and the
     * private pointer.
     *
     * If the HAL needs to change the stream configuration for an existing
     * stream due to the new configuration, it may rewrite the values of usage
     * and/or max_buffers during the configure call.
     *
     * The framework will detect such a change, and may then reallocate the
     * stream buffers before using buffers from that stream in a request.
     *
     * If a currently-active stream is not included in stream_list, the HAL may
     * safely remove any references to that stream. It will not be reused in a
     * later configure() call by the framework, and all the gralloc buffers for
     * it will be freed after the configure_streams() call returns.
     *
     * The stream_list structure is owned by the framework, and may not be
     * accessed once this call completes. The address of an individual
     * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_stream() call which no longer includes
     * that camera3_stream_t in the stream_list argument. The HAL may not change
     * values in the stream structure outside of the private pointer, except for
     * the usage and max_buffers members during the configure_streams() call
     * itself.
     *
     * If the stream is new, max_buffer, and private pointer fields of the
     * stream structure will all be set to 0. The usage will be set to the
     * consumer usage flags. The HAL device must set these fields before the
     * configure_streams() call returns. These fields are then used by the
     * framework and the platform gralloc module to allocate the gralloc
     * buffers for each stream.
     *
     * Newly allocated buffers may be included in a capture request at any time
     * by the framework. Once a gralloc buffer is returned to the framework
     * with process_capture_result (and its respective release_fence has been
     * signaled) the framework may free or reuse it at any time.
     *
     * ------------------------------------------------------------------------
     *
     * Preconditions:
     *
     * The framework will only call this method when no captures are being
     * processed. That is, all results have been returned to the framework, and
     * all in-flight input and output buffers have been returned and their
     * release sync fences have been signaled by the HAL. The framework will not
     * submit new requests for capture while the configure_streams() call is
     * underway.
     *
     * Postconditions:
     *
     * The HAL device must configure itself to provide maximum possible output
     * frame rate given the sizes and formats of the output streams, as
     * documented in the camera device's static metadata.
     *
     * Performance requirements:
     *
     * This call is expected to be heavyweight and possibly take several hundred
     * milliseconds to complete, since it may require resetting and
     * reconfiguring the image sensor and the camera processing pipeline.
     * Nevertheless, the HAL device should attempt to minimize the
     * reconfiguration delay to minimize the user-visible pauses during
     * application operational mode changes (such as switching from still
     * capture to video recording).
     *
     * The HAL should return from this call in 500ms, and must return from this
     * call in 1000ms.
     *
     * Return values:
     *
     *  0:      On successful stream configuration
     *
     * -EINVAL: If the requested stream configuration is invalid. Some examples
     *          of invalid stream configurations include:
     *
     *          - Including more than 1 input-capable stream (INPUT or
     *            BIDIRECTIONAL)
     *
     *          - Not including any output-capable streams (OUTPUT or
     *            BIDIRECTIONAL)
     *
     *          - Including streams with unsupported formats, or an unsupported
     *            size for that format.
     *
     *          - Including too many output streams of a certain format.
     *
     *          Note that the framework submitting an invalid stream
     *          configuration is not normal operation, since stream
     *          configurations are checked before configure. An invalid
     *          configuration means that a bug exists in the framework code, or
     *          there is a mismatch between the HAL's static metadata and the
     *          requirements on streams.
     *
     * -ENODEV: If there has been a fatal error and the device is no longer
     *          operational. Only close() can be called successfully by the
     *          framework after this error is returned.
     */
    int (*configure_streams)(const struct camera3_device *,
            camera3_stream_configuration_t *stream_list);

    /**
     * register_stream_buffers:
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * DEPRECATED. This will not be called and must be set to NULL.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * Register buffers for a given stream with the HAL device. This method is
     * called by the framework after a new stream is defined by
     * configure_streams, and before buffers from that stream are included in a
     * capture request. If the same stream is listed in a subsequent
     * configure_streams() call, register_stream_buffers will _not_ be called
     * again for that stream.
     *
     * The framework does not need to register buffers for all configured
     * streams before it submits the first capture request. This allows quick
     * startup for preview (or similar use cases) while other streams are still
     * being allocated.
     *
     * This method is intended to allow the HAL device to map or otherwise
     * prepare the buffers for later use. The buffers passed in will already be
     * locked for use. At the end of the call, all the buffers must be ready to
     * be returned to the stream.  The buffer_set argument is only valid for the
     * duration of this call.
     *
     * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
     * the camera HAL should inspect the passed-in buffers here to determine any
     * platform-private pixel format information.
     *
     * Performance requirements:
     *
     * This should be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 5ms.
     *
     * Return values:
     *
     *  0:      On successful registration of the new stream buffers
     *
     * -EINVAL: If the stream_buffer_set does not refer to a valid active
     *          stream, or if the buffers array is invalid.
     *
     * -ENOMEM: If there was a failure in registering the buffers. The framework
     *          must consider all the stream buffers to be unregistered, and can
     *          try to register again later.
     *
     * -ENODEV: If there is a fatal error, and the device is no longer
     *          operational. Only close() can be called successfully by the
     *          framework after this error is returned.
     */
    int (*register_stream_buffers)(const struct camera3_device *,
            const camera3_stream_buffer_set_t *buffer_set);

    /**********************************************************************
     * Request creation and submission
     */

    /**
     * construct_default_request_settings:
     *
     * Create capture settings for standard camera use cases.
     *
     * The device must return a settings buffer that is configured to meet the
     * requested use case, which must be one of the CAMERA3_TEMPLATE_*
     * enums. All request control fields must be included.
     *
     * The HAL retains ownership of this structure, but the pointer to the
     * structure must be valid until the device is closed. The framework and the
     * HAL may not modify the buffer once it is returned by this call. The same
     * buffer may be returned for subsequent calls for the same template, or for
     * other templates.
     *
     * Performance requirements:
     *
     * This should be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 5ms.
     *
     * Return values:
     *
     *   Valid metadata: On successful creation of a default settings
     *                   buffer.
     *
     *   NULL:           In case of a fatal error. After this is returned, only
     *                   the close() method can be called successfully by the
     *                   framework.
     */
    const camera_metadata_t* (*construct_default_request_settings)(
            const struct camera3_device *,
            int type);

    /**
     * process_capture_request:
     *
     * Send a new capture request to the HAL. The HAL should not return from
     * this call until it is ready to accept the next request to process. Only
     * one call to process_capture_request() will be made at a time by the
     * framework, and the calls will all be from the same thread. The next call
     * to process_capture_request() will be made as soon as a new request and
     * its associated buffers are available. In a normal preview scenario, this
     * means the function will be called again by the framework almost
     * instantly.
     *
     * The actual request processing is asynchronous, with the results of
     * capture being returned by the HAL through the process_capture_result()
     * call. This call requires the result metadata to be available, but output
     * buffers may simply provide sync fences to wait on. Multiple requests are
     * expected to be in flight at once, to maintain full output frame rate.
     *
     * The framework retains ownership of the request structure. It is only
     * guaranteed to be valid during this call. The HAL device must make copies
     * of the information it needs to retain for the capture processing. The HAL
     * is responsible for waiting on and closing the buffers' fences and
     * returning the buffer handles to the framework.
     *
     * The HAL must write the file descriptor for the input buffer's release
     * sync fence into input_buffer->release_fence, if input_buffer is not
     * NULL. If the HAL returns -1 for the input buffer release sync fence, the
     * framework is free to immediately reuse the input buffer. Otherwise, the
     * framework will wait on the sync fence before refilling and reusing the
     * input buffer.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * The input/output buffers provided by the framework in each request
     * may be brand new (having never before seen by the HAL).
     *
     * ------------------------------------------------------------------------
     * Performance considerations:
     *
     * Handling a new buffer should be extremely lightweight and there should be
     * no frame rate degradation or frame jitter introduced.
     *
     * This call must return fast enough to ensure that the requested frame
     * rate can be sustained, especially for streaming cases (post-processing
     * quality settings set to FAST). The HAL should return this call in 1
     * frame interval, and must return from this call in 4 frame intervals.
     *
     * Return values:
     *
     *  0:      On a successful start to processing the capture request
     *
     * -EINVAL: If the input is malformed (the settings are NULL when not
     *          allowed, there are 0 output buffers, etc) and capture processing
     *          cannot start. Failures during request processing should be
     *          handled by calling camera3_callback_ops_t.notify(). In case of
     *          this error, the framework will retain responsibility for the
     *          stream buffers' fences and the buffer handles; the HAL should
     *          not close the fences or return these buffers with
     *          process_capture_result.
     *
     * -ENODEV: If the camera device has encountered a serious error. After this
     *          error is returned, only the close() method can be successfully
     *          called by the framework.
     *
     */
    int (*process_capture_request)(const struct camera3_device *,
            camera3_capture_request_t *request);

    /**********************************************************************
     * Miscellaneous methods
     */

    /**
     * get_metadata_vendor_tag_ops:
     *
     * Get methods to query for vendor extension metadata tag information. The
     * HAL should fill in all the vendor tag operation methods, or leave ops
     * unchanged if no vendor tags are defined.
     *
     * The definition of vendor_tag_query_ops_t can be found in
     * system/media/camera/include/system/camera_metadata.h.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *    DEPRECATED. This function has been deprecated and should be set to
     *    NULL by the HAL.  Please implement get_vendor_tag_ops in camera_common.h
     *    instead.
     */
    void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
            vendor_tag_query_ops_t* ops);

    /**
     * dump:
     *
     * Print out debugging state for the camera device. This will be called by
     * the framework when the camera service is asked for a debug dump, which
     * happens when using the dumpsys tool, or when capturing a bugreport.
     *
     * The passed-in file descriptor can be used to write debugging text using
     * dprintf() or write(). The text should be in ASCII encoding only.
     *
     * Performance requirements:
     *
     * This must be a non-blocking call. The HAL should return from this call
     * in 1ms, must return from this call in 10ms. This call must avoid
     * deadlocks, as it may be called at any point during camera operation.
     * Any synchronization primitives used (such as mutex locks or semaphores)
     * should be acquired with a timeout.
     */
    void (*dump)(const struct camera3_device *, int fd);

    /**
     * flush:
     *
     * Flush all currently in-process captures and all buffers in the pipeline
     * on the given device. The framework will use this to dump all state as
     * quickly as possible in order to prepare for a configure_streams() call.
     *
     * No buffers are required to be successfully returned, so every buffer
     * held at the time of flush() (whether successfully filled or not) may be
     * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed
     * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call,
     * provided they are successfully filled.
     *
     * All requests currently in the HAL are expected to be returned as soon as
     * possible.  Not-in-process requests should return errors immediately. Any
     * interruptible hardware blocks should be stopped, and any uninterruptible
     * blocks should be waited on.
     *
     * More specifically, the HAL must follow below requirements for various cases:
     *
     * 1. For captures that are too late for the HAL to cancel/stop, and will be
     *    completed normally by the HAL; i.e. the HAL can send shutter/notify and
     *    process_capture_result and buffers as normal.
     *
     * 2. For pending requests that have not done any processing, the HAL must call notify
     *    CAMERA3_MSG_ERROR_REQUEST, and return all the output buffers with
     *    process_capture_result in the error state (CAMERA3_BUFFER_STATUS_ERROR).
     *    The HAL must not place the release fence into an error state, instead,
     *    the release fences must be set to the acquire fences passed by the framework,
     *    or -1 if they have been waited on by the HAL already. This is also the path
     *    to follow for any captures for which the HAL already called notify() with
     *    CAMERA3_MSG_SHUTTER but won't be producing any metadata/valid buffers for.
     *    After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only process_capture_results with
     *    buffers in CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notifys or
     *    process_capture_result with non-null metadata is allowed.
     *
     * 3. For partially completed pending requests that will not have all the output
     *    buffers or perhaps missing metadata, the HAL should follow below:
     *
     *    3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected result
     *    metadata (i.e. one or more partial metadata) won't be available for the capture.
     *
     *    3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that won't
     *         be produced for the capture.
     *
     *    3.3  Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp before
     *         any buffers/metadata are returned with process_capture_result.
     *
     *    3.4 For captures that will produce some results, the HAL must not call
     *        CAMERA3_MSG_ERROR_REQUEST, since that indicates complete failure.
     *
     *    3.5. Valid buffers/metadata should be passed to the framework as normal.
     *
     *    3.6. Failed buffers should be returned to the framework as described for case 2.
     *         But failed buffers do not have to follow the strict ordering valid buffers do,
     *         and may be out-of-order with respect to valid buffers. For example, if buffers
     *         A, B, C, D, E are sent, D and E are failed, then A, E, B, D, C is an acceptable
     *         return order.
     *
     *    3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is sufficient, no
     *         need to call process_capture_result with NULL metadata or equivalent.
     *
     * flush() should only return when there are no more outstanding buffers or
     * requests left in the HAL. The framework may call configure_streams (as
     * the HAL state is now quiesced) or may issue new requests.
     *
     * Note that it's sufficient to only support fully-succeeded and fully-failed result cases.
     * However, it is highly desirable to support the partial failure cases as well, as it
     * could help improve the flush call overall performance.
     *
     * Performance requirements:
     *
     * The HAL should return from this call in 100ms, and must return from this
     * call in 1000ms. And this call must not be blocked longer than pipeline
     * latency (see S7 for definition).
     *
     * Version information:
     *
     *   only available if device version >= CAMERA_DEVICE_API_VERSION_3_1.
     *
     * Return values:
     *
     *  0:      On a successful flush of the camera HAL.
     *
     * -EINVAL: If the input is malformed (the device is not valid).
     *
     * -ENODEV: If the camera device has encountered a serious error. After this
     *          error is returned, only the close() method can be successfully
     *          called by the framework.
     */
    int (*flush)(const struct camera3_device *);

    /* reserved for future use */
    void *reserved[8];
} camera3_device_ops_t;

/**********************************************************************
 *
 * Camera device definition
 *
 */
typedef struct camera3_device {
    /**
     * common.version must equal CAMERA_DEVICE_API_VERSION_3_0 to identify this
     * device as implementing version 3.0 of the camera device HAL.
     *
     * Performance requirements:
     *
     * Camera open (common.module->common.methods->open) should return in 200ms, and must return
     * in 500ms.
     * Camera close (common.close) should return in 200ms, and must return in 500ms.
     *
     */
    hw_device_t common;
    camera3_device_ops_t *ops;
    void *priv;
} camera3_device_t;

__END_DECLS

#endif /* #ifdef ANDROID_INCLUDE_CAMERA3_H */