Diffstat (limited to 'python/openvino/runtime/streaming/streaming_inference_app')
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/CMakeLists.txt              |   29
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/categories.txt              | 1001
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/command_line.cpp            |   72
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/command_line.h              |   31
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.cpp |  413
-rw-r--r--  python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.h   |   74
6 files changed, 1620 insertions, 0 deletions
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/CMakeLists.txt b/python/openvino/runtime/streaming/streaming_inference_app/CMakeLists.txt
new file mode 100644
index 0000000..ec9cc4f
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/CMakeLists.txt
@@ -0,0 +1,29 @@
+# Copyright 2023 Intel Corporation
+#
+# This software and the related documents are Intel copyrighted materials,
+# and your use of them is governed by the express license under which they
+# were provided to you ("License"). Unless the License provides otherwise,
+# you may not use, modify, copy, publish, distribute, disclose or transmit
+# this software or the related documents without Intel's prior written
+# permission.
+#
+# This software and the related documents are provided as is, with no express
+# or implied warranties, other than those that are expressly stated in the
+# License.
+
+project(streaming_inference_app)
+
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED True)
+
+set(all_files
+ streaming_inference_app.cpp
+ streaming_inference_app.h
+ command_line.cpp
+ command_line.h)
+
+# Targets
+add_executable(${PROJECT_NAME} ${all_files})
+
+target_link_libraries(${PROJECT_NAME} openvino::runtime)
+target_link_libraries(${PROJECT_NAME} coreDlaRuntimePlugin)
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/categories.txt b/python/openvino/runtime/streaming/streaming_inference_app/categories.txt
new file mode 100644
index 0000000..d77b8ba
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/categories.txt
@@ -0,0 +1,1001 @@
+-
+class ID 0
+class ID 1
+class ID 2
+class ID 3
+class ID 4
+class ID 5
+class ID 6
+class ID 7
+class ID 8
+class ID 9
+class ID 10
+class ID 11
+class ID 12
+class ID 13
+class ID 14
+class ID 15
+class ID 16
+class ID 17
+class ID 18
+class ID 19
+class ID 20
+class ID 21
+class ID 22
+class ID 23
+class ID 24
+class ID 25
+class ID 26
+class ID 27
+class ID 28
+class ID 29
+class ID 30
+class ID 31
+class ID 32
+class ID 33
+class ID 34
+class ID 35
+class ID 36
+class ID 37
+class ID 38
+class ID 39
+class ID 40
+class ID 41
+class ID 42
+class ID 43
+class ID 44
+class ID 45
+class ID 46
+class ID 47
+class ID 48
+class ID 49
+class ID 50
+class ID 51
+class ID 52
+class ID 53
+class ID 54
+class ID 55
+class ID 56
+class ID 57
+class ID 58
+class ID 59
+class ID 60
+class ID 61
+class ID 62
+class ID 63
+class ID 64
+class ID 65
+class ID 66
+class ID 67
+class ID 68
+class ID 69
+class ID 70
+class ID 71
+class ID 72
+class ID 73
+class ID 74
+class ID 75
+class ID 76
+class ID 77
+class ID 78
+class ID 79
+class ID 80
+class ID 81
+class ID 82
+class ID 83
+class ID 84
+class ID 85
+class ID 86
+class ID 87
+class ID 88
+class ID 89
+class ID 90
+class ID 91
+class ID 92
+class ID 93
+class ID 94
+class ID 95
+class ID 96
+class ID 97
+class ID 98
+class ID 99
+class ID 100
+class ID 101
+class ID 102
+class ID 103
+class ID 104
+class ID 105
+class ID 106
+class ID 107
+class ID 108
+class ID 109
+class ID 110
+class ID 111
+class ID 112
+class ID 113
+class ID 114
+class ID 115
+class ID 116
+class ID 117
+class ID 118
+class ID 119
+class ID 120
+class ID 121
+class ID 122
+class ID 123
+class ID 124
+class ID 125
+class ID 126
+class ID 127
+class ID 128
+class ID 129
+class ID 130
+class ID 131
+class ID 132
+class ID 133
+class ID 134
+class ID 135
+class ID 136
+class ID 137
+class ID 138
+class ID 139
+class ID 140
+class ID 141
+class ID 142
+class ID 143
+class ID 144
+class ID 145
+class ID 146
+class ID 147
+class ID 148
+class ID 149
+class ID 150
+class ID 151
+class ID 152
+class ID 153
+class ID 154
+class ID 155
+class ID 156
+class ID 157
+class ID 158
+class ID 159
+class ID 160
+class ID 161
+class ID 162
+class ID 163
+class ID 164
+class ID 165
+class ID 166
+class ID 167
+class ID 168
+class ID 169
+class ID 170
+class ID 171
+class ID 172
+class ID 173
+class ID 174
+class ID 175
+class ID 176
+class ID 177
+class ID 178
+class ID 179
+class ID 180
+class ID 181
+class ID 182
+class ID 183
+class ID 184
+class ID 185
+class ID 186
+class ID 187
+class ID 188
+class ID 189
+class ID 190
+class ID 191
+class ID 192
+class ID 193
+class ID 194
+class ID 195
+class ID 196
+class ID 197
+class ID 198
+class ID 199
+class ID 200
+class ID 201
+class ID 202
+class ID 203
+class ID 204
+class ID 205
+class ID 206
+class ID 207
+class ID 208
+class ID 209
+class ID 210
+class ID 211
+class ID 212
+class ID 213
+class ID 214
+class ID 215
+class ID 216
+class ID 217
+class ID 218
+class ID 219
+class ID 220
+class ID 221
+class ID 222
+class ID 223
+class ID 224
+class ID 225
+class ID 226
+class ID 227
+class ID 228
+class ID 229
+class ID 230
+class ID 231
+class ID 232
+class ID 233
+class ID 234
+class ID 235
+class ID 236
+class ID 237
+class ID 238
+class ID 239
+class ID 240
+class ID 241
+class ID 242
+class ID 243
+class ID 244
+class ID 245
+class ID 246
+class ID 247
+class ID 248
+class ID 249
+class ID 250
+class ID 251
+class ID 252
+class ID 253
+class ID 254
+class ID 255
+class ID 256
+class ID 257
+class ID 258
+class ID 259
+class ID 260
+class ID 261
+class ID 262
+class ID 263
+class ID 264
+class ID 265
+class ID 266
+class ID 267
+class ID 268
+class ID 269
+class ID 270
+class ID 271
+class ID 272
+class ID 273
+class ID 274
+class ID 275
+class ID 276
+class ID 277
+class ID 278
+class ID 279
+class ID 280
+class ID 281
+class ID 282
+class ID 283
+class ID 284
+class ID 285
+class ID 286
+class ID 287
+class ID 288
+class ID 289
+class ID 290
+class ID 291
+class ID 292
+class ID 293
+class ID 294
+class ID 295
+class ID 296
+class ID 297
+class ID 298
+class ID 299
+class ID 300
+class ID 301
+class ID 302
+class ID 303
+class ID 304
+class ID 305
+class ID 306
+class ID 307
+class ID 308
+class ID 309
+class ID 310
+class ID 311
+class ID 312
+class ID 313
+class ID 314
+class ID 315
+class ID 316
+class ID 317
+class ID 318
+class ID 319
+class ID 320
+class ID 321
+class ID 322
+class ID 323
+class ID 324
+class ID 325
+class ID 326
+class ID 327
+class ID 328
+class ID 329
+class ID 330
+class ID 331
+class ID 332
+class ID 333
+class ID 334
+class ID 335
+class ID 336
+class ID 337
+class ID 338
+class ID 339
+class ID 340
+class ID 341
+class ID 342
+class ID 343
+class ID 344
+class ID 345
+class ID 346
+class ID 347
+class ID 348
+class ID 349
+class ID 350
+class ID 351
+class ID 352
+class ID 353
+class ID 354
+class ID 355
+class ID 356
+class ID 357
+class ID 358
+class ID 359
+class ID 360
+class ID 361
+class ID 362
+class ID 363
+class ID 364
+class ID 365
+class ID 366
+class ID 367
+class ID 368
+class ID 369
+class ID 370
+class ID 371
+class ID 372
+class ID 373
+class ID 374
+class ID 375
+class ID 376
+class ID 377
+class ID 378
+class ID 379
+class ID 380
+class ID 381
+class ID 382
+class ID 383
+class ID 384
+class ID 385
+class ID 386
+class ID 387
+class ID 388
+class ID 389
+class ID 390
+class ID 391
+class ID 392
+class ID 393
+class ID 394
+class ID 395
+class ID 396
+class ID 397
+class ID 398
+class ID 399
+class ID 400
+class ID 401
+class ID 402
+class ID 403
+class ID 404
+class ID 405
+class ID 406
+class ID 407
+class ID 408
+class ID 409
+class ID 410
+class ID 411
+class ID 412
+class ID 413
+class ID 414
+class ID 415
+class ID 416
+class ID 417
+class ID 418
+class ID 419
+class ID 420
+class ID 421
+class ID 422
+class ID 423
+class ID 424
+class ID 425
+class ID 426
+class ID 427
+class ID 428
+class ID 429
+class ID 430
+class ID 431
+class ID 432
+class ID 433
+class ID 434
+class ID 435
+class ID 436
+class ID 437
+class ID 438
+class ID 439
+class ID 440
+class ID 441
+class ID 442
+class ID 443
+class ID 444
+class ID 445
+class ID 446
+class ID 447
+class ID 448
+class ID 449
+class ID 450
+class ID 451
+class ID 452
+class ID 453
+class ID 454
+class ID 455
+class ID 456
+class ID 457
+class ID 458
+class ID 459
+class ID 460
+class ID 461
+class ID 462
+class ID 463
+class ID 464
+class ID 465
+class ID 466
+class ID 467
+class ID 468
+class ID 469
+class ID 470
+class ID 471
+class ID 472
+class ID 473
+class ID 474
+class ID 475
+class ID 476
+class ID 477
+class ID 478
+class ID 479
+class ID 480
+class ID 481
+class ID 482
+class ID 483
+class ID 484
+class ID 485
+class ID 486
+class ID 487
+class ID 488
+class ID 489
+class ID 490
+class ID 491
+class ID 492
+class ID 493
+class ID 494
+class ID 495
+class ID 496
+class ID 497
+class ID 498
+class ID 499
+class ID 500
+class ID 501
+class ID 502
+class ID 503
+class ID 504
+class ID 505
+class ID 506
+class ID 507
+class ID 508
+class ID 509
+class ID 510
+class ID 511
+class ID 512
+class ID 513
+class ID 514
+class ID 515
+class ID 516
+class ID 517
+class ID 518
+class ID 519
+class ID 520
+class ID 521
+class ID 522
+class ID 523
+class ID 524
+class ID 525
+class ID 526
+class ID 527
+class ID 528
+class ID 529
+class ID 530
+class ID 531
+class ID 532
+class ID 533
+class ID 534
+class ID 535
+class ID 536
+class ID 537
+class ID 538
+class ID 539
+class ID 540
+class ID 541
+class ID 542
+class ID 543
+class ID 544
+class ID 545
+class ID 546
+class ID 547
+class ID 548
+class ID 549
+class ID 550
+class ID 551
+class ID 552
+class ID 553
+class ID 554
+class ID 555
+class ID 556
+class ID 557
+class ID 558
+class ID 559
+class ID 560
+class ID 561
+class ID 562
+class ID 563
+class ID 564
+class ID 565
+class ID 566
+class ID 567
+class ID 568
+class ID 569
+class ID 570
+class ID 571
+class ID 572
+class ID 573
+class ID 574
+class ID 575
+class ID 576
+class ID 577
+class ID 578
+class ID 579
+class ID 580
+class ID 581
+class ID 582
+class ID 583
+class ID 584
+class ID 585
+class ID 586
+class ID 587
+class ID 588
+class ID 589
+class ID 590
+class ID 591
+class ID 592
+class ID 593
+class ID 594
+class ID 595
+class ID 596
+class ID 597
+class ID 598
+class ID 599
+class ID 600
+class ID 601
+class ID 602
+class ID 603
+class ID 604
+class ID 605
+class ID 606
+class ID 607
+class ID 608
+class ID 609
+class ID 610
+class ID 611
+class ID 612
+class ID 613
+class ID 614
+class ID 615
+class ID 616
+class ID 617
+class ID 618
+class ID 619
+class ID 620
+class ID 621
+class ID 622
+class ID 623
+class ID 624
+class ID 625
+class ID 626
+class ID 627
+class ID 628
+class ID 629
+class ID 630
+class ID 631
+class ID 632
+class ID 633
+class ID 634
+class ID 635
+class ID 636
+class ID 637
+class ID 638
+class ID 639
+class ID 640
+class ID 641
+class ID 642
+class ID 643
+class ID 644
+class ID 645
+class ID 646
+class ID 647
+class ID 648
+class ID 649
+class ID 650
+class ID 651
+class ID 652
+class ID 653
+class ID 654
+class ID 655
+class ID 656
+class ID 657
+class ID 658
+class ID 659
+class ID 660
+class ID 661
+class ID 662
+class ID 663
+class ID 664
+class ID 665
+class ID 666
+class ID 667
+class ID 668
+class ID 669
+class ID 670
+class ID 671
+class ID 672
+class ID 673
+class ID 674
+class ID 675
+class ID 676
+class ID 677
+class ID 678
+class ID 679
+class ID 680
+class ID 681
+class ID 682
+class ID 683
+class ID 684
+class ID 685
+class ID 686
+class ID 687
+class ID 688
+class ID 689
+class ID 690
+class ID 691
+class ID 692
+class ID 693
+class ID 694
+class ID 695
+class ID 696
+class ID 697
+class ID 698
+class ID 699
+class ID 700
+class ID 701
+class ID 702
+class ID 703
+class ID 704
+class ID 705
+class ID 706
+class ID 707
+class ID 708
+class ID 709
+class ID 710
+class ID 711
+class ID 712
+class ID 713
+class ID 714
+class ID 715
+class ID 716
+class ID 717
+class ID 718
+class ID 719
+class ID 720
+class ID 721
+class ID 722
+class ID 723
+class ID 724
+class ID 725
+class ID 726
+class ID 727
+class ID 728
+class ID 729
+class ID 730
+class ID 731
+class ID 732
+class ID 733
+class ID 734
+class ID 735
+class ID 736
+class ID 737
+class ID 738
+class ID 739
+class ID 740
+class ID 741
+class ID 742
+class ID 743
+class ID 744
+class ID 745
+class ID 746
+class ID 747
+class ID 748
+class ID 749
+class ID 750
+class ID 751
+class ID 752
+class ID 753
+class ID 754
+class ID 755
+class ID 756
+class ID 757
+class ID 758
+class ID 759
+class ID 760
+class ID 761
+class ID 762
+class ID 763
+class ID 764
+class ID 765
+class ID 766
+class ID 767
+class ID 768
+class ID 769
+class ID 770
+class ID 771
+class ID 772
+class ID 773
+class ID 774
+class ID 775
+class ID 776
+class ID 777
+class ID 778
+class ID 779
+class ID 780
+class ID 781
+class ID 782
+class ID 783
+class ID 784
+class ID 785
+class ID 786
+class ID 787
+class ID 788
+class ID 789
+class ID 790
+class ID 791
+class ID 792
+class ID 793
+class ID 794
+class ID 795
+class ID 796
+class ID 797
+class ID 798
+class ID 799
+class ID 800
+class ID 801
+class ID 802
+class ID 803
+class ID 804
+class ID 805
+class ID 806
+class ID 807
+class ID 808
+class ID 809
+class ID 810
+class ID 811
+class ID 812
+class ID 813
+class ID 814
+class ID 815
+class ID 816
+class ID 817
+class ID 818
+class ID 819
+class ID 820
+class ID 821
+class ID 822
+class ID 823
+class ID 824
+class ID 825
+class ID 826
+class ID 827
+class ID 828
+class ID 829
+class ID 830
+class ID 831
+class ID 832
+class ID 833
+class ID 834
+class ID 835
+class ID 836
+class ID 837
+class ID 838
+class ID 839
+class ID 840
+class ID 841
+class ID 842
+class ID 843
+class ID 844
+class ID 845
+class ID 846
+class ID 847
+class ID 848
+class ID 849
+class ID 850
+class ID 851
+class ID 852
+class ID 853
+class ID 854
+class ID 855
+class ID 856
+class ID 857
+class ID 858
+class ID 859
+class ID 860
+class ID 861
+class ID 862
+class ID 863
+class ID 864
+class ID 865
+class ID 866
+class ID 867
+class ID 868
+class ID 869
+class ID 870
+class ID 871
+class ID 872
+class ID 873
+class ID 874
+class ID 875
+class ID 876
+class ID 877
+class ID 878
+class ID 879
+class ID 880
+class ID 881
+class ID 882
+class ID 883
+class ID 884
+class ID 885
+class ID 886
+class ID 887
+class ID 888
+class ID 889
+class ID 890
+class ID 891
+class ID 892
+class ID 893
+class ID 894
+class ID 895
+class ID 896
+class ID 897
+class ID 898
+class ID 899
+class ID 900
+class ID 901
+class ID 902
+class ID 903
+class ID 904
+class ID 905
+class ID 906
+class ID 907
+class ID 908
+class ID 909
+class ID 910
+class ID 911
+class ID 912
+class ID 913
+class ID 914
+class ID 915
+class ID 916
+class ID 917
+class ID 918
+class ID 919
+class ID 920
+class ID 921
+class ID 922
+class ID 923
+class ID 924
+class ID 925
+class ID 926
+class ID 927
+class ID 928
+class ID 929
+class ID 930
+class ID 931
+class ID 932
+class ID 933
+class ID 934
+class ID 935
+class ID 936
+class ID 937
+class ID 938
+class ID 939
+class ID 940
+class ID 941
+class ID 942
+class ID 943
+class ID 944
+class ID 945
+class ID 946
+class ID 947
+class ID 948
+class ID 949
+class ID 950
+class ID 951
+class ID 952
+class ID 953
+class ID 954
+class ID 955
+class ID 956
+class ID 957
+class ID 958
+class ID 959
+class ID 960
+class ID 961
+class ID 962
+class ID 963
+class ID 964
+class ID 965
+class ID 966
+class ID 967
+class ID 968
+class ID 969
+class ID 970
+class ID 971
+class ID 972
+class ID 973
+class ID 974
+class ID 975
+class ID 976
+class ID 977
+class ID 978
+class ID 979
+class ID 980
+class ID 981
+class ID 982
+class ID 983
+class ID 984
+class ID 985
+class ID 986
+class ID 987
+class ID 988
+class ID 989
+class ID 990
+class ID 991
+class ID 992
+class ID 993
+class ID 994
+class ID 995
+class ID 996
+class ID 997
+class ID 998
+class ID 999
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/command_line.cpp b/python/openvino/runtime/streaming/streaming_inference_app/command_line.cpp
new file mode 100644
index 0000000..794310b
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/command_line.cpp
@@ -0,0 +1,72 @@
+// Copyright 2021-2023 Intel Corporation.
+//
+// This software and the related documents are Intel copyrighted materials,
+// and your use of them is governed by the express license under which they
+// were provided to you ("License"). Unless the License provides otherwise,
+// you may not use, modify, copy, publish, distribute, disclose or transmit
+// this software or the related documents without Intel's prior written
+// permission.
+//
+// This software and the related documents are provided as is, with no express
+// or implied warranties, other than those that are expressly stated in the
+// License.
+
+#include "command_line.h"
+#include <algorithm>
+
+static void TrimString(std::string& trimString) {
+ trimString.erase(0, trimString.find_first_not_of(" \n\r\t"));
+ trimString.erase(trimString.find_last_not_of(" \n\r\t") + 1);
+}
+
+static void MakeLower(std::string& stringValue) {
+  std::transform(stringValue.begin(), stringValue.end(), stringValue.begin(),
+                 [](unsigned char c) { return static_cast<char>(::tolower(c)); });
+}
+
+// Program -option=value
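+// Options may be prefixed with '-' or '/'; names are trimmed and lowercased,
+// and an option given without '=' is stored with an empty value.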
+CommandLine::CommandLine(int argumentCount, char* argumentValues[]) {
+ if (argumentCount > 0) _executableName = argumentValues[0];
+
+ for (int i = 1; i < argumentCount; i++) {
+ std::string inputString(argumentValues[i]);
+ std::string nextChar = inputString.substr(0, 1);
+ if ((nextChar == "-") or (nextChar == "/")) {
+ inputString = inputString.substr(1);
+ size_t equals = inputString.find("=");
+ std::string option;
+ std::string value;
+
+ if (equals == std::string::npos) {
+ option = inputString;
+ } else {
+ option = inputString.substr(0, equals);
+ value = inputString.substr(equals + 1);
+ }
+
+ TrimString(option);
+ TrimString(value);
+ MakeLower(option);
+ _optionMap[option] = value;
+ }
+ }
+}
+
+std::string CommandLine::GetOptionValue(const char* optionName) {
+ auto i = _optionMap.find(optionName);
+ if (i != _optionMap.end())
+ return i->second;
+ else
+ return "";
+}
+
+bool CommandLine::HaveOption(const char* optionName) { return (_optionMap.find(optionName) != _optionMap.end()); }
+
+bool CommandLine::GetOption(const char* optionName, std::string& optionValue) {
+ auto i = _optionMap.find(optionName);
+ if (i == _optionMap.end()) return false;
+
+ optionValue = i->second;
+ return true;
+}
+
+size_t CommandLine::NumOptions() { return _optionMap.size(); }
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/command_line.h b/python/openvino/runtime/streaming/streaming_inference_app/command_line.h
new file mode 100644
index 0000000..41b12f0
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/command_line.h
@@ -0,0 +1,31 @@
+// Copyright 2021-2023 Intel Corporation.
+//
+// This software and the related documents are Intel copyrighted materials,
+// and your use of them is governed by the express license under which they
+// were provided to you ("License"). Unless the License provides otherwise,
+// you may not use, modify, copy, publish, distribute, disclose or transmit
+// this software or the related documents without Intel's prior written
+// permission.
+//
+// This software and the related documents are provided as is, with no express
+// or implied warranties, other than those that are expressly stated in the
+// License.
+
+#pragma once
+#include <string>
+#include <unordered_map>
+
+class CommandLine {
+ public:
+ CommandLine(int argumentCount, char* argumentValues[]);
+
+ std::string GetOptionValue(const char* optionName);
+ bool GetOption(const char* optionName, std::string& optionValue);
+ bool HaveOption(const char* optionName);
+ std::string GetExecutableName() { return _executableName; }
+ size_t NumOptions();
+
+ private:
+ std::string _executableName;
+ std::unordered_map<std::string, std::string> _optionMap;
+};
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.cpp b/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.cpp
new file mode 100644
index 0000000..d0e1ed0
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.cpp
@@ -0,0 +1,413 @@
+// Copyright 2023 Intel Corporation.
+//
+// This software and the related documents are Intel copyrighted materials,
+// and your use of them is governed by the express license under which they
+// were provided to you ("License"). Unless the License provides otherwise,
+// you may not use, modify, copy, publish, distribute, disclose or transmit
+// this software or the related documents without Intel's prior written
+// permission.
+//
+// This software and the related documents are provided as is, with no express
+// or implied warranties, other than those that are expressly stated in the
+// License.
+
+#include "streaming_inference_app.h"
+#include <fcntl.h>
+#include <signal.h>
+#include <sys/utsname.h>
+#include <unistd.h>
+#include <algorithm>
+#include <filesystem>
+#include <fstream>
+#include <sstream>
+#include <thread>
+#include "dla_plugin_config.hpp"
+
+using namespace std::chrono_literals;
+
+std::ofstream StreamingInferenceApp::_resultsStream("results.txt");
+std::mutex StreamingInferenceApp::_signalMutex;
+std::condition_variable StreamingInferenceApp::_signalConditionVariable;
+std::chrono::time_point<std::chrono::system_clock> StreamingInferenceApp::_startTime;
+
+int main(int numParams, char* paramValues[]) {
+ StreamingInferenceApp app(numParams, paramValues);
+
+  try {
+    app.Run();
+  } catch (const std::exception& ex) {
+    std::cerr << ex.what() << '\n';
+    return 1;
+  }
+  return 0;
+}
+
+StreamingInferenceApp::StreamingInferenceApp(int numParams, char* paramValues[])
+ : _commandLine(numParams, paramValues) {
+ OsStartup();
+ LoadClassNames();
+}
+
+StreamingInferenceApp::~StreamingInferenceApp() {
+ timespec waitTimeout = {};
+ if (_pCancelSemaphore) {
+ // Reset the cancel semaphore
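+    // With a zeroed (already expired) absolute timeout, sem_timedwait never
+    // blocks: it decrements the count if it can, else fails with ETIMEDOUT.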
+ int r = 0;
+ do {
+ r = ::sem_timedwait(_pCancelSemaphore, &waitTimeout);
+ } while (r == 0);
+ ::sem_close(_pCancelSemaphore);
+ }
+
+ if (_pReadyForImageStreamSemaphore) {
+ // Reset the ready semaphore
+ int r = 0;
+ do {
+ r = ::sem_timedwait(_pReadyForImageStreamSemaphore, &waitTimeout);
+ } while (r == 0);
+ ::sem_close(_pReadyForImageStreamSemaphore);
+ }
+}
+
+void StreamingInferenceApp::Run() {
+ std::filesystem::path pluginsFilename = "plugins.xml";
+
+ std::string deviceName;
+ std::string arch;
+ std::string model;
+
+ // Get the command line options for the model, arch file, and device
+ if (not _commandLine.GetOption("model", model) or not _commandLine.GetOption("arch", arch) or
+ not _commandLine.GetOption("device", deviceName)) {
+ return Usage();
+ }
+
+ std::filesystem::path architectureFilename = arch;
+ std::filesystem::path compiledModelFilename = model;
+
+ // Check that the provided files do in fact exist
+ if (not CheckFileExists(architectureFilename, "architecture") or not CheckFileExists(pluginsFilename, "plugins") or
+ not CheckFileExists(compiledModelFilename, "compiled model")) {
+ return;
+ }
+
+ InferenceEngine::Core inferenceEngine(pluginsFilename);
+
+  // Setup CoreDLA private configuration parameters
+  inferenceEngine.SetConfig({{DLIAPlugin::properties::arch_path.name(), architectureFilename}}, "FPGA");
+
+ // If dropSourceBuffers is 0, no input buffers are dropped
+ // If dropSourceBuffers is 1, then 1 buffer is processed, 1 gets dropped
+ // If dropSourceBuffers is 2, then 1 buffer is processed, 2 get dropped, etc.
+ uint32_t dropSourceBuffers = 0;
+
+ inferenceEngine.SetConfig({{DLIAPlugin::properties::streaming_drop_source_buffers.name(), std::to_string(dropSourceBuffers)},
+ {DLIAPlugin::properties::external_streaming.name(), CONFIG_VALUE(YES)}},
+ "FPGA");
+
+ std::ifstream inputFile(compiledModelFilename, std::fstream::binary);
+ if (not inputFile) {
+ std::cout << "Failed to load compiled model file.\n";
+ return;
+ }
+
+ // Load the model to the device
+ InferenceEngine::ExecutableNetwork importedNetwork = inferenceEngine.ImportNetwork(inputFile, deviceName, {});
+
+ // The plugin defines the number of inferences requests required for streaming
+ uint32_t numStreamingInferenceRequests = importedNetwork.GetMetric(DLIAPlugin::properties::num_streaming_inference_requests.name()).as<uint32_t>();
+ const std::string cancelSemaphoreName = importedNetwork.GetMetric(DLIAPlugin::properties::cancel_semaphore_name.name()).as<std::string>();
+ _cancelSemaphoreName = cancelSemaphoreName;
+
+ for (uint32_t i = 0; i < numStreamingInferenceRequests; i++) {
+ auto spInferenceData = std::make_shared<SingleInferenceData>(this, importedNetwork, i);
+ _inferences.push_back(spInferenceData);
+ }
+
+ // Start the inference requests. Streaming inferences will reschedule
+ // themselves when complete
+ for (auto& inference : _inferences) {
+ inference->StartAsync();
+ }
+
+ std::cout << "Ready to start image input stream.\n";
+
+ // Signal the image streaming app that we are ready, so it can
+ // begin transferring files
+ SetReadyForImageStreamSemaphore();
+
+ // Wait until Ctrl+C
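+  // The wait wakes once per second; SigIntHandler notifies the condition
+  // variable to end the loop.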
+ bool done = false;
+ while (not done) {
+ std::unique_lock<std::mutex> lock(_signalMutex);
+ done = (_signalConditionVariable.wait_for(lock, 1000ms) != std::cv_status::timeout);
+ }
+
+ SetShutdownSemaphore();
+
+ for (auto& inference : _inferences) {
+ inference->Cancel();
+ }
+
+ _inferences.clear();
+}
+
+
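+/**
+ * Open the cancel semaphore (named by the plugin) and post it to signal
+ * that streaming should shut down.
+ */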
+void StreamingInferenceApp::SetShutdownSemaphore() {
+ _pCancelSemaphore = ::sem_open(_cancelSemaphoreName.c_str(), O_CREAT, 0644, 0);
+ if (_pCancelSemaphore) {
+ ::sem_post(_pCancelSemaphore);
+ }
+}
+
+
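+/**
+ * Post the named semaphore that tells the image streaming app it can
+ * begin transferring image files.
+ */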
+void StreamingInferenceApp::SetReadyForImageStreamSemaphore() {
+ _pReadyForImageStreamSemaphore = ::sem_open("/CoreDLA_ready_for_streaming", O_CREAT, 0644, 0);
+ if (_pReadyForImageStreamSemaphore) {
+ ::sem_post(_pReadyForImageStreamSemaphore);
+ }
+}
+
+
+/**
+ * Print a help menu to the console
+ */
+void StreamingInferenceApp::Usage() {
+ std::cout << "Usage:\n";
+ std::cout << "\tstreaming_inference_app -model=<model> -arch=<arch> -device=<device>\n\n";
+ std::cout << "Where:\n";
+ std::cout << "\t<model> is the compiled model binary file, eg /home/root/resnet-50-tf/RN50_Performance_no_folding.bin\n";
+ std::cout << "\t<arch> is the architecture file, eg /home/root/resnet-50-tf/A10_Performance.arch\n";
+ std::cout << "\t<device> is the OpenVINO device ID, eg HETERO:FPGA or HETERO:FPGA,CPU\n";
+}
+
+
+/**
+ * Check that a file exists
+ *
+ * @param[in] filename Filename to check
+ * @param[in] message Description of file to display if it does not exist
+ * @returns true if the file exists, false otherwise
+ */
+bool StreamingInferenceApp::CheckFileExists(const std::filesystem::path& filename, const std::string& message) {
+ if (not std::filesystem::exists(filename)) {
+ std::cout << "Can't find " << message << ", '" << filename.c_str() << "'\n";
+ return false;
+ }
+
+ return true;
+}
+
+////////////
+
+std::atomic<uint32_t> SingleInferenceData::_atomic{0};
+std::atomic<uint32_t> SingleInferenceData::_numResults{0};
+
+SingleInferenceData::SingleInferenceData(StreamingInferenceApp* pApp,
+ InferenceEngine::ExecutableNetwork& importedNetwork,
+ uint32_t index)
+ : _pApp(pApp), _importedNetwork(importedNetwork), _index(index), _inferenceCount(0) {
+ // Set up output blob
+ InferenceEngine::ConstOutputsDataMap outputsInfo = importedNetwork.GetOutputsInfo();
+ std::shared_ptr<const InferenceEngine::Data> spOutputInfo = outputsInfo.begin()->second;
+ std::string outputName = outputsInfo.begin()->first;
+
+ _spOutputBlob = CreateOutputBlob(spOutputInfo);
+
+ // Create an inference request and set its completion callback
+ _inferenceRequest = importedNetwork.CreateInferRequest();
+ auto inferenceRequestCompleteCB = [=]() { ProcessResult(); };
+ _inferenceRequest.SetCompletionCallback(inferenceRequestCompleteCB);
+
+ // Assign the output blob to the inference request
+ _inferenceRequest.SetBlob(outputName, _spOutputBlob);
+}
+
+
+std::shared_ptr<InferenceEngine::Blob> SingleInferenceData::CreateOutputBlob(
+ std::shared_ptr<const InferenceEngine::Data> spOutputInfo) {
+ const InferenceEngine::TensorDesc& outputTensorDesc = spOutputInfo->getTensorDesc();
+  std::shared_ptr<InferenceEngine::Blob> pOutputBlob = InferenceEngine::make_shared_blob<float>(outputTensorDesc);
+  pOutputBlob->allocate();
+
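+  // Zero-fill the output blob so stale memory is never reported as a score.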
+  InferenceEngine::MemoryBlob::Ptr pMemoryBlob = InferenceEngine::as<InferenceEngine::MemoryBlob>(pOutputBlob);
+ if (pMemoryBlob) {
+ auto lockedMemory = pMemoryBlob->wmap();
+ float* pOutputBlobData = lockedMemory.as<float*>();
+ if (pOutputBlobData) {
+      size_t outputSize = pOutputBlob->size();
+ for (size_t i = 0; i < outputSize; i++) {
+ pOutputBlobData[i] = 0.0f;
+ }
+ }
+ }
+
+  return pOutputBlob;
+}
+
+
+void SingleInferenceData::StartAsync() {
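+  // Tag this request with a globally increasing sequence number, then (re)submit it.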
+ _inferenceCount = _atomic++;
+ _inferenceRequest.StartAsync();
+}
+
+void SingleInferenceData::Wait() { _inferenceRequest.Wait(); }
+
+void SingleInferenceData::Cancel() { _inferenceRequest.Cancel(); }
+
+
+/**
+ * Stores the results of an inference
+ *
+ * The index corresponds to the category of the image, and the score is
+ * the confidence level of the image.
+ */
+class ResultItem {
+ public:
+ uint32_t _index;
+ float _score;
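+  // Reversed comparison: sorting with std::sort yields descending score order.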
+  bool operator<(const ResultItem& other) const { return (_score > other._score); }
+};
+
+
+/**
+ * Called when inference request has completed
+ *
+ * The inference results are floating point numbers consisting of the score for each category.
+ * The scores are then sorted and the highest is written to the console. The top 5 scores of the
+ * first 1000 images are saved to results.txt.
+ *
+ * Set as a callback in SingleInferenceData()
+ */
+void SingleInferenceData::ProcessResult() {
+ if (_pApp and _pApp->IsCancelling()) {
+ return;
+ }
+
+  // Atomically claim a sequence number for this result; completion callbacks
+  // for different inference requests may run concurrently.
+  uint32_t resultNumber = ++_numResults;
+
+  // If this is the first returned inference, store the current time to calculate the inference rate
+  if (resultNumber == 1) {
+    StreamingInferenceApp::_startTime = std::chrono::system_clock::now();
+  } else if (resultNumber == 101) {
+    // The inference rate is calculated after 100 results have been received
+    auto endTime = std::chrono::system_clock::now();
+    auto duration = endTime - StreamingInferenceApp::_startTime;
+    double durationMS = (double)std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
+    // durationMS covers 100 inferences: divide by 1000 (ms -> s) and by 100
+    // (inferences) to get seconds per inference.
+    double durationSecondsOne = durationMS / 100000.0;
+    double rate = 1.0 / durationSecondsOne;
+    std::cout << "Inference rate = " << rate << '\n';
+  }
+
+ // Create a float pointer to the returned data
+ size_t outputSize = _spOutputBlob->size();
+ float* pOutputData = _spOutputBlob->buffer().as<float*>();
+ if (!pOutputData) {
+ return;
+ }
+
+ // Store each score as a ResultItem
+ std::vector<ResultItem> results;
+ for (size_t i = 0; i < outputSize; i++) {
+ results.push_back({(uint32_t)i, pOutputData[i]});
+ }
+
+ // Sort the scores and set up the output streams
+ std::sort(results.begin(), results.end());
+ std::stringstream fileString;
+ std::stringstream outString;
+ bool flushFile = false;
+
+ // Store the top 5 results of the first 1000 images to be written to a file
+  if (resultNumber <= 1000) {
+    fileString << "Result: image[" << resultNumber << "]\n";
+ fileString << std::fixed << std::setprecision(1);
+
+    for (size_t i = 0; i < std::min<size_t>(5, results.size()); i++) {
+ std::string className = _pApp->_imageNetClasses[results[i]._index];
+ float score = results[i]._score * 100.0f;
+ fileString << (i + 1) << ". " << className << ", score = " << score << '\n';
+ }
+
+ fileString << '\n';
+ }
+
+  if (resultNumber == 1001) {
+ fileString << "End of results capture\n";
+ flushFile = true;
+ }
+
+ // Store the top score to write to the console
+ outString << std::fixed << std::setprecision(1);
+ std::string className = _pApp->_imageNetClasses[results[0]._index];
+ float score = results[0]._score * 100.0f;
+  outString << resultNumber << " - " << className << ", score = " << score << '\n';
+
+ // Write the results to the file
+ std::string writeFileString = fileString.str();
+ if (not writeFileString.empty()) {
+ StreamingInferenceApp::_resultsStream << writeFileString;
+ if (flushFile) {
+ StreamingInferenceApp::_resultsStream << std::endl;
+ }
+ }
+
+ // Write the top score to the console
+ std::cout << outString.str();
+
+ // Start again
+ StartAsync();
+}
+
+
+/**
+ * Load the categories and store them in _imageNetClasses
+ */
+void StreamingInferenceApp::LoadClassNames() {
+ _imageNetClasses.resize(1001);
+
+ bool validClassFile = false;
+ std::filesystem::path classNameFilePath = "categories.txt";
+
+ if (std::filesystem::exists(classNameFilePath)) {
+ size_t classIndex = 0;
+ std::ifstream classNameStream(classNameFilePath);
+
+ if (classNameStream) {
+ std::string className;
+ while (std::getline(classNameStream, className)) {
+ if (classIndex < 1001) _imageNetClasses[classIndex] = className;
+
+ classIndex++;
+ }
+
+      validClassFile = (classIndex == 1001);
+      if (not validClassFile) {
+        std::cout << "Ignoring the categories.txt file. The file is expected to be a text file "
+                     "with 1001 lines: a placeholder first line followed by 1000 category names.\n";
+      }
+    }
+  } else {
+    std::cout << "No categories.txt file found. This file should contain 1001 lines:\n"
+                 "a placeholder first line, then the name of each of the 1000 categories.\n";
+  }
+
+ if (not validClassFile) {
+ _imageNetClasses[0] = "NONE";
+ for (size_t i = 1; i <= 1000; i++) {
+ _imageNetClasses[i] = "Image class #" + std::to_string(i);
+ }
+ }
+}
+
+static void SigIntHandler(int) {
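+  // Note: iostream, mutex, and condition_variable calls are not formally
+  // async-signal-safe; this sample accepts that for a simple shutdown path.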
+ std::cout << "\nCtrl+C detected. Shutting down application\n";
+ std::lock_guard<std::mutex> lock(StreamingInferenceApp::_signalMutex);
+ StreamingInferenceApp::_signalConditionVariable.notify_one();
+}
+
+void StreamingInferenceApp::OsStartup() {
+ // Ctrl+C will exit the application
+ signal(SIGINT, SigIntHandler);
+}
diff --git a/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.h b/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.h
new file mode 100644
index 0000000..3cdafa0
--- /dev/null
+++ b/python/openvino/runtime/streaming/streaming_inference_app/streaming_inference_app.h
@@ -0,0 +1,74 @@
+// Copyright 2023 Intel Corporation.
+//
+// This software and the related documents are Intel copyrighted materials,
+// and your use of them is governed by the express license under which they
+// were provided to you ("License"). Unless the License provides otherwise,
+// you may not use, modify, copy, publish, distribute, disclose or transmit
+// this software or the related documents without Intel's prior written
+// permission.
+//
+// This software and the related documents are provided as is, with no express
+// or implied warranties, other than those that are expressly stated in the
+// License.
+
+#pragma once
+#include <semaphore.h>
+#include <atomic>
+#include <chrono>
+#include <condition_variable>
+#include <filesystem>
+#include <fstream>
+#include <memory>
+#include <mutex>
+#include <vector>
+#include "command_line.h"
+#include "inference_engine.hpp"
+
+class SingleInferenceData;
+using SingleInferenceDataPtr = std::shared_ptr<SingleInferenceData>;
+
+class StreamingInferenceApp {
+ friend class SingleInferenceData;
+
+ public:
+ StreamingInferenceApp(int numParams, char* paramValues[]);
+ ~StreamingInferenceApp();
+ void Usage();
+ void Run();
+ bool IsCancelling() { return (_pCancelSemaphore != nullptr); }
+
+ static std::mutex _signalMutex;
+ static std::condition_variable _signalConditionVariable;
+ static std::chrono::time_point<std::chrono::system_clock> _startTime;
+ static std::ofstream _resultsStream;
+
+ private:
+ void OsStartup();
+ bool CheckFileExists(const std::filesystem::path& filename, const std::string& message);
+ void SetShutdownSemaphore();
+ void SetReadyForImageStreamSemaphore();
+ void LoadClassNames();
+
+ std::vector<SingleInferenceDataPtr> _inferences;
+ CommandLine _commandLine;
+ sem_t* _pCancelSemaphore = nullptr;
+ sem_t* _pReadyForImageStreamSemaphore = nullptr;
+ std::string _cancelSemaphoreName;
+ std::vector<std::string> _imageNetClasses;
+};
+
+class SingleInferenceData {
+ public:
+ SingleInferenceData(StreamingInferenceApp* pApp, InferenceEngine::ExecutableNetwork& importedNetwork, uint32_t index);
+ void StartAsync();
+ void Wait();
+ void Cancel();
+
+ private:
+ void ProcessResult();
+ std::shared_ptr<InferenceEngine::Blob> CreateOutputBlob(std::shared_ptr<const InferenceEngine::Data> spOutputInfo);
+
+ StreamingInferenceApp* _pApp;
+ InferenceEngine::ExecutableNetwork& _importedNetwork;
+ std::shared_ptr<InferenceEngine::Blob> _spOutputBlob;
+ InferenceEngine::InferRequest _inferenceRequest;
+ uint32_t _index;
+ uint32_t _inferenceCount;
+  static std::atomic<uint32_t> _numResults;
+ static std::atomic<uint32_t> _atomic;
+};