-
Notifications
You must be signed in to change notification settings - Fork 23
/
ARController.cs
2088 lines (1828 loc) · 80.5 KB
/
ARController.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/*
* ARController.cs
* artoolkitX for Unity
*
* This file is part of artoolkitX for Unity.
*
* artoolkitX for Unity is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* artoolkitX for Unity is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with artoolkitX for Unity. If not, see <http://www.gnu.org/licenses/>.
*
* As a special exception, the copyright holders of this library give you
* permission to link this library with independent modules to produce an
* executable, regardless of the license terms of these independent modules, and to
* copy and distribute the resulting executable under terms of your choice,
* provided that you also meet, for each linked independent module, the terms and
* conditions of the license of that module. An independent module is a module
* which is neither derived from nor based on this library. If you modify this
* library, you may extend this exception to your version of the library, but you
* are not obligated to do so. If you do not wish to do so, delete this exception
* statement from your version.
*
* Copyright 2017-2018 Realmax, Inc.
* Copyright 2015 Daqri, LLC.
* Copyright 2010-2015 ARToolworks, Inc.
*
* Author(s): Philip Lamb, Julian Looser, Dan Bell, Thorsten Bux.
*
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
/// <summary>
/// Modes for scaling the video background to the screen
/// (see ARController.ContentModeNames for display names).
/// </summary>
public enum ContentMode
{
    Stretch,
    Fit,
    Fill,
    OneToOne
}
/// <summary>
/// Alignment of the video background within the screen when the scaled
/// content does not exactly cover it.
/// </summary>
public enum ContentAlign
{
    TopLeft,
    Top,
    TopRight,
    Left,
    Center,
    Right,
    BottomLeft,
    Bottom,
    BottomRight
}
/// <summary>
/// Manages core ARToolKit behaviour.
/// </summary>
///
[ExecuteInEditMode]
[RequireComponent(typeof(ARVideoConfig))]
public class ARController : MonoBehaviour
{
    //
    // Logging.
    //
    // Optional external sink for log messages; when set, Log() forwards messages to it.
    public static Action<String> logCallback { get; set; }
    // Recent log messages, capped at MaximumLogMessages.
    private static List<String> logMessages = new List<String>();
    private const int MaximumLogMessages = 1000;
    private const string LogTag = "ARController: ";

    // Application preferences.
    public bool UseNativeGLTexturingIfAvailable = true;
    public bool AllowNonRGBVideo = false;
    public bool QuitOnEscOrBack = true;
    public bool AutoStartAR = true;

    // API Addition - Users can check this value to see if the camera is initialised and running.
    // Usage: Used to show 'Please Wait' UIs while the camera is still initialising or markers are being loaded.
    [HideInInspector]
    public bool IsRunning { get { return _running; } }

    //
    // State.
    //
    private string _version = "";
    private bool _running = false;
    // Set when pausing while running, so AR is restarted on unpause (see OnApplicationPause()).
    private bool _runOnUnpause = false;
    private bool _sceneConfiguredForVideo = false;
    private bool _sceneConfiguredForVideoWaitingMessageLogged = false;
    private bool _useNativeGLTexturing = false;
    private bool _useColor32 = true;

    //
    // Video source 0.
    //
    // Config. in.
    public string videoCParamName0 = "";
    public string videoConfigurationLinux0 = "";
    public int BackgroundLayer0 = 8;
    public bool PluginConfigurationRequiredFlag = false;
    // Config. out.
    private int _videoWidth0 = 0;
    private int _videoHeight0 = 0;
    private int _videoPixelSize0 = 0;
    private string _videoPixelFormatString0 = "";
    private Matrix4x4 _videoProjectionMatrix0;
    // Unity objects.
    private GameObject _videoBackgroundMeshGO0 = null; // The GameObject which holds the MeshFilter and MeshRenderer for the background video, and also the Camera object(s) used to render them.
    private Color[] _videoColorArray0 = null; // An array used to fetch pixels from the native side, only if not using native GL texturing.
    private Color32[] _videoColor32Array0 = null; // An array used to fetch pixels from the native side, only if not using native GL texturing.
    private Texture2D _videoTexture0 = null; // Texture object with the video image.
    private Material _videoMaterial0 = null; // Material which uses our "VideoPlaneNoLight" shader, and paints itself with _videoTexture0.
    // Stereo config.
    public bool VideoIsStereo = false;
    public string transL2RName = "transL2R";

    //
    // Video source 1.
    //
    // Config. in.
    public string videoCParamName1 = "";
    public int BackgroundLayer1 = 9;
    // Config. out.
    private int _videoWidth1 = 0;
    private int _videoHeight1 = 0;
    private int _videoPixelSize1 = 0;
    private string _videoPixelFormatString1 = "";
    private Matrix4x4 _videoProjectionMatrix1;
    // Unity objects.
    private GameObject _videoBackgroundMeshGO1 = null; // The GameObject which holds the MeshFilter and MeshRenderer for the background video, and also the Camera object(s) used to render them.
    private Color[] _videoColorArray1 = null; // An array used to fetch pixels from the native side, only if not using native GL texturing.
    private Color32[] _videoColor32Array1 = null; // An array used to fetch pixels from the native side, only if not using native GL texturing.
    private Texture2D _videoTexture1 = null; // Texture object with the video image.
    private Material _videoMaterial1 = null; // Material which uses our "VideoPlaneNoLight" shader, and paints itself with _videoTexture1.
    //
    // Background camera(s).
    //
    private Camera clearCamera = null;
    private GameObject _videoBackgroundCameraGO0 = null; // The GameObject which holds the Camera object for the mono / stereo left-eye video background.
    private Camera _videoBackgroundCamera0 = null; // The Camera component attached to _videoBackgroundCameraGO0. Easier to keep this reference than calling _videoBackgroundCameraGO0.GetComponent<Camera>() each time.
    private GameObject _videoBackgroundCameraGO1 = null; // The GameObject which holds the Camera object(s) for the stereo right-eye video background.
    private Camera _videoBackgroundCamera1 = null; // The Camera component attached to _videoBackgroundCameraGO1. Easier to keep this reference than calling _videoBackgroundCameraGO1.GetComponent<Camera>() each time.

    //
    // Other
    //
    public float NearPlane = 0.01f;
    public float FarPlane = 5.0f;
    public bool ContentRotate90 = false; // Used in CreateVideoBackgroundCamera().
    public bool ContentFlipH = false;
    public bool ContentFlipV = false;
    public ContentAlign ContentAlign = ContentAlign.Center;
#if UNITY_ANDROID
    //
    // Android Plugin
    //
    private AndroidJavaObject androidPlugin = null;
#endif
    // Per-frame texture update timing statistics (currently disabled).
    //private int _frameStatsCount = 0;
    //private float _frameStatsTimeUpdateTexture = 0.0f;
    //private float _frameStatsTimeSetPixels = 0.0f;
    //private float _frameStatsTimeApply = 0.0f;

    // Human-readable names for each ContentMode value, e.g. for UI display.
    public readonly static Dictionary<ContentMode, string> ContentModeNames = new Dictionary<ContentMode, string>
    {
        {ContentMode.Stretch, "Stretch"},
        {ContentMode.Fit, "Fit"},
        {ContentMode.Fill, "Fill"},
        {ContentMode.OneToOne, "1:1"},
    };

    // Frames per second calculations (see CalculateFPS()).
    private int frameCounter = 0;
    private float timeCounter = 0.0f;
    private float lastFramerate = 0.0f;
    private float refreshTime = 0.5f;
    /// <summary>Thresholding modes used when binarising the camera image for marker detection.</summary>
    public enum ARToolKitThresholdMode
    {
        Manual = 0,
        Median = 1,
        Otsu = 2,
        Adaptive = 3,
        Bracketing = 4
    }

    /// <summary>Whether marker borders are detected as white or black regions.</summary>
    public enum ARToolKitLabelingMode
    {
        WhiteRegion = 0,
        BlackRegion = 1,
    }

    /// <summary>Pattern detection modes: template (pictorial) matching, matrix (barcode) detection, or both.</summary>
    public enum ARToolKitPatternDetectionMode
    {
        AR_TEMPLATE_MATCHING_COLOR = 0,
        AR_TEMPLATE_MATCHING_MONO = 1,
        AR_MATRIX_CODE_DETECTION = 2,
        AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX = 3,
        AR_TEMPLATE_MATCHING_MONO_AND_MATRIX = 4
    };

    /// <summary>Matrix (barcode) code types. The low byte encodes the grid size; the high bits select the error-correction variant.</summary>
    public enum ARToolKitMatrixCodeType
    {
        AR_MATRIX_CODE_3x3 = 0x03,
        AR_MATRIX_CODE_3x3_PARITY65 = 0x03 | 0x100,
        AR_MATRIX_CODE_3x3_HAMMING63 = 0x03 | 0x200,
        AR_MATRIX_CODE_4x4 = 0x04,
        AR_MATRIX_CODE_4x4_BCH_13_9_3 = 0x04 | 0x300,
        AR_MATRIX_CODE_4x4_BCH_13_5_5 = 0x04 | 0x400,
        AR_MATRIX_CODE_5x5 = 0x05,
        AR_MATRIX_CODE_5x5_BCH_22_12_5 = 0x05 | 0x400,
        AR_MATRIX_CODE_5x5_BCH_22_7_7 = 0x05 | 0x500,
        AR_MATRIX_CODE_6x6 = 0x06,
        // AR_MATRIX_CODE_GLOBAL_ID = 0x0e | 0xb00
    };

    /// <summary>Image processing modes (whole frame vs. field).</summary>
    public enum ARToolKitImageProcMode
    {
        AR_IMAGE_PROC_FRAME_IMAGE = 0,
        AR_IMAGE_PROC_FIELD_IMAGE = 1
    };

    /// <summary>Event IDs issued to the native plugin's render-event handler.</summary>
    public enum ARW_UNITY_RENDER_EVENTID
    {
        NOP = 0, // No operation (does nothing).
        UPDATE_TEXTURE_GL = 1,
        UPDATE_TEXTURE_GL_STEREO = 2,
    };

    /// <summary>Error codes reported by the native artoolkitX wrapper (see arwGetError()).</summary>
    public enum ARW_ERROR
    {
        ARW_ERROR_NONE = 0,
        ARW_ERROR_GENERIC = -1,
        ARW_ERROR_OUT_OF_MEMORY = -2,
        ARW_ERROR_OVERFLOW = -3,
        ARW_ERROR_NODATA = -4,
        ARW_ERROR_IOERROR = -5,
        ARW_ERROR_EOF = -6,
        ARW_ERROR_TIMEOUT = -7,
        ARW_ERROR_INVALID_COMMAND = -8,
        ARW_ERROR_INVALID_ENUM = -9,
        ARW_ERROR_THREADS = -10,
        ARW_ERROR_FILE_NOT_FOUND = -11,
        ARW_ERROR_LENGTH_UNAVAILABLE = -12,
        ARW_ERROR_DEVICE_UNAVAILABLE = -13
    };

    /// <summary>Log levels used by the native artoolkitX logging system.</summary>
    public enum AR_LOG_LEVEL
    {
        AR_LOG_LEVEL_DEBUG = 0,
        AR_LOG_LEVEL_INFO,
        AR_LOG_LEVEL_WARN,
        AR_LOG_LEVEL_ERROR,
        AR_LOG_LEVEL_REL_INFO
    }

    /// <summary>
    /// Values for device position, as encoded in ARVideoSourceInfoT.flags &amp; AR_VIDEO_SOURCE_INFO_POSITION_MASK.
    /// </summary>
    public enum AR_VIDEO_POSITION
    {
        AR_VIDEO_POSITION_UNKNOWN = 0x0000, ///< Camera physical position on device unknown.
        AR_VIDEO_POSITION_FRONT = 0x0008, ///< Camera is on front of device pointing towards user.
        AR_VIDEO_POSITION_BACK = 0x0010, ///< Camera is on back of device pointing away from user.
        AR_VIDEO_POSITION_LEFT = 0x0018, ///< Camera is on left of device pointing to user's left.
        AR_VIDEO_POSITION_RIGHT = 0x0020, ///< Camera is on right of device pointing to user's right.
        AR_VIDEO_POSITION_TOP = 0x0028, ///< Camera is on top of device pointing toward ceiling when device is held upright.
        AR_VIDEO_POSITION_BOTTOM = 0x0030, ///< Camera is on bottom of device pointing towards floor when device is held upright.
        AR_VIDEO_POSITION_OTHER = 0x0038, ///< Camera physical position on device is known but none of the above.
    }

    /// <summary>
    /// Values for device stereo mode, as encoded in ARVideoSourceInfoT.flags &amp; AR_VIDEO_SOURCE_INFO_STEREO_MODE_MASK.
    /// </summary>
    public enum AR_VIDEO_STEREO_MODE
    {
        AR_VIDEO_STEREO_MODE_MONO = 0x0000, ///< Device is monoscopic.
        AR_VIDEO_STEREO_MODE_LEFT = 0x0040, ///< Device is left eye of a stereoscopic pair.
        AR_VIDEO_STEREO_MODE_RIGHT = 0x0080, ///< Device is right eye of a stereoscopic pair.
        AR_VIDEO_STEREO_MODE_FRAME_SEQUENTIAL = 0x00C0, ///< Device is left and right stereo images in sequential frames.
        AR_VIDEO_STEREO_MODE_SIDE_BY_SIDE = 0x0100, ///< Device is left and right stereo images in a single frame, arranged horizontally with left eye on left.
        AR_VIDEO_STEREO_MODE_OVER_UNDER = 0x0140, ///< Device is left and right stereo images in a single frame, arranged vertically with left eye on top.
        AR_VIDEO_STEREO_MODE_HALF_SIDE_BY_SIDE = 0x0180, ///< Device is left and right stereo images in a single frame with the frames scaled to half-width, arranged horizontally with left eye on left.
        AR_VIDEO_STEREO_MODE_OVER_UNDER_HALF_HEIGHT = 0x01C0, ///< Device is left and right stereo images in a single frame with the frames scaled to half-height, arranged vertically with left eye on top.
        AR_VIDEO_STEREO_MODE_ROW_INTERLACED = 0x0200, ///< Device is left and right stereo images in a single frame with row interleaving, where pixels in even-numbered rows are sampled from the left eye, and pixels in odd-number rows from the right eye.
        AR_VIDEO_STEREO_MODE_COLUMN_INTERLACED = 0x0240, ///< Device is left and right stereo images in a single frame with column interleaving, where pixels in even-numbered columns are sampled from the left eye, and pixels in odd-number columns from the right eye.
        AR_VIDEO_STEREO_MODE_ROW_AND_COLUMN_INTERLACED = 0x0280, ///< Device is left and right stereo images in a single frame with row and column interleaving, where pixels where the evenness/oddness of the row is the same as the column are sampled from the left eye, and the remaining pixels from the right eye.
        AR_VIDEO_STEREO_MODE_ANAGLYPH_RG = 0x02C0, ///< Device is left and right stereo images in a single frame, where both eyes are converted to mono and the left eye is carried in the red channel and the right eye in the green channel.
        AR_VIDEO_STEREO_MODE_ANAGLYPH_RB = 0x0300, ///< Device is left and right stereo images in a single frame, where both eyes are converted to mono and the left eye is carried in the red channel and the right eye in the blue channel.
        AR_VIDEO_STEREO_MODE_RESERVED0 = 0x0340, ///< Reserved for future use.
        AR_VIDEO_STEREO_MODE_RESERVED1 = 0x0380, ///< Reserved for future use.
        AR_VIDEO_STEREO_MODE_RESERVED2 = 0x03C0, ///< Reserved for future use.
    }

    /// <summary>
    /// Values for ARVideoSourceInfoT.flags.
    /// </summary>
    public enum AR_VIDEO_SOURCE_INFO
    {
        AR_VIDEO_SOURCE_INFO_FLAG_OFFLINE = 0x0001, ///< 0 = unknown or not offline, 1 = offline.
        AR_VIDEO_SOURCE_INFO_FLAG_IN_USE = 0x0002, ///< 0 = unknown or not in use, 1 = in use.
        AR_VIDEO_SOURCE_INFO_FLAG_OPEN_ASYNC = 0x0004, ///< 0 = open normally, 1 = open async.
        AR_VIDEO_SOURCE_INFO_POSITION_MASK = 0x0038, ///< compare (value & AR_VIDEO_SOURCE_INFO_POSITION_MASK) against enums.
        AR_VIDEO_SOURCE_INFO_STEREO_MODE_MASK = 0x03C0, ///< compare (value & AR_VIDEO_SOURCE_INFO_STEREO_MODE_MASK) against enums.
    }

    /// <summary>
    /// Values describing a video source. Populated by GetVideoSourceInfoList().
    /// </summary>
    public struct ARVideoSourceInfoT
    {
        public string name; ///< UTF-8 encoded string representing the name of the source, in a form suitable for presentation to an end-user, e.g. in a list of inputs.
        public string model; ///< UTF-8 encoded string representing the model of the source, where this information is available. May be NULL if model information is not attainable.
        public string UID; ///< UTF-8 encoded string representing a unique ID for this source, and suitable for passing to arVideoOpen/ar2VideoOpen as a UID in the configuration. May be NULL if sources cannot be uniquely identified.
        public AR_VIDEO_SOURCE_INFO flags;
        public string open_token; ///< UTF-8 encoded string containing the token that should be passed (in the space-separated list of tokens to arVideoOpen/ar2VideoOpen, in order to select this source to be opened. Note that this token is only valid so long as the underlying video hardware configuration does not change, so should not be stored between sessions.
    }
    // Private fields with accessors.
    // These serialized values hold the Editor-configured tracking settings; they are
    // pushed through to the native plugin by StartAR() once video capture is running.
    [SerializeField]
    private ContentMode currentContentMode = ContentMode.Fit;
    [SerializeField]
    private ARToolKitThresholdMode currentThresholdMode = ARToolKitThresholdMode.Manual;
    [SerializeField]
    private int currentThreshold = 100;
    [SerializeField]
    private ARToolKitLabelingMode currentLabelingMode = ARToolKitLabelingMode.BlackRegion;
    [SerializeField]
    private int currentTemplateSize = 16;
    [SerializeField]
    private int currentTemplateCountMax = 25;
    [SerializeField]
    private float currentBorderSize = 0.25f;
    [SerializeField]
    private ARToolKitPatternDetectionMode currentPatternDetectionMode = ARToolKitPatternDetectionMode.AR_TEMPLATE_MATCHING_COLOR;
    [SerializeField]
    private ARToolKitMatrixCodeType currentMatrixCodeType = ARToolKitMatrixCodeType.AR_MATRIX_CODE_3x3;
    [SerializeField]
    private ARToolKitImageProcMode currentImageProcMode = ARToolKitImageProcMode.AR_IMAGE_PROC_FRAME_IMAGE;
    [SerializeField]
    private bool currentUseVideoBackground = true;
    [SerializeField]
    private bool currentNFTMultiMode = false;
    [SerializeField]
    private AR_LOG_LEVEL currentLogLevel = AR_LOG_LEVEL.AR_LOG_LEVEL_INFO;
    // Main reference to the plugin functions. Created in OnEnable, destroyed in OnDisable().
    private IPluginFunctions pluginFunctions = null;
    private ARVideoConfig arvideoconfig = null;
//
// MonoBehavior methods.
//
void Awake()
{
Log(LogTag + "ARController.Awake())");
}
void OnEnable()
{
pluginFunctions = new PluginFunctionsARX();
arvideoconfig = gameObject.GetComponent<ARVideoConfig>();
#if !UNITY_EDITOR
# if UNITY_IOS
ARX_pinvoke.aruRequestCamera();
Thread.Sleep(2000);
# endif
#endif // !UNITY_EDITOR
Log(LogTag + "ARController.OnEnable()");
Application.runInBackground = true;
// Register the log callback. This can be set irrespective of whether PluginFunctions.inited is true or false.
switch (Application.platform)
{
case RuntimePlatform.OSXEditor: // Unity Editor on OS X.
case RuntimePlatform.OSXPlayer: // Unity Player on OS X.
case RuntimePlatform.WindowsEditor: // Unity Editor on Windows.
case RuntimePlatform.WindowsPlayer: // Unity Player on Windows.
case RuntimePlatform.LinuxEditor:
case RuntimePlatform.LinuxPlayer:
case RuntimePlatform.WSAPlayerX86: // Unity Player on Windows Store X86.
case RuntimePlatform.WSAPlayerX64: // Unity Player on Windows Store X64.
case RuntimePlatform.WSAPlayerARM: // Unity Player on Windows Store ARM.
pluginFunctions.arwRegisterLogCallback(Log);
break;
case RuntimePlatform.Android: // Unity Player on Android.
case RuntimePlatform.IPhonePlayer: // Unity Player on iOS.
break;
default:
break;
}
// ARController is up, so init.
if (!pluginFunctions.IsInited())
{
InitializeAR();
}
}
public List<ARVideoSourceInfoT> GetVideoSourceInfoList(string config)
{
List<ARVideoSourceInfoT> l = new List<ARVideoSourceInfoT>();
if (pluginFunctions == null)
{
return l;
}
int count = pluginFunctions.arwCreateVideoSourceInfoList(config);
if (count > 0)
{
for (int i = 0; i < count; i++)
{
ARVideoSourceInfoT si = new ARVideoSourceInfoT();
int flags;
bool ok = pluginFunctions.arwGetVideoSourceInfoListEntry(i, out si.name, out si.model, out si.UID, out flags, out si.open_token);
if (ok)
{
si.flags = (AR_VIDEO_SOURCE_INFO)flags; // Coerce type.
l.Add(si);
}
}
pluginFunctions.arwDeleteVideoSourceInfoList();
}
return l;
}
private void InitializeAR()
{
if (!pluginFunctions.IsInited())
{
if (pluginFunctions.arwInitialiseAR(TemplateSize, TemplateCountMax))
{
// artoolkitX version number
_version = pluginFunctions.arwGetARToolKitVersion();
Log(LogTag + "artoolkitX version " + _version + " initialised.");
}
else
{
Log(LogTag + "Error initialising artoolkitX");
}
}
// ARTrackables may be loaded once the plugin is initialised. The ARController
// is now responsible for letting the ARTrackable know that this is ready by
// passing a reference to the plugin.
// This code has a matching block in FinalizeAR which undoes the reference.
if (pluginFunctions.IsInited())
{
// Ensure ARTrackable objects that were instantiated/deserialized before the native interface came up are all loaded.
ARTrackable[] trackables = FindObjectsOfType<ARTrackable>();
foreach (ARTrackable t in trackables)
{
t.PluginFunctions = pluginFunctions;
}
}
}
void Start()
{
Log(LogTag + "ARController.Start(): Application.isPlaying = " + Application.isPlaying + " autoStart: " + AutoStartAR);
if (!Application.isPlaying) return; // Editor Start.
// Player start.
if (AutoStartAR)
{
if (!StartAR())
{
//Application.Quit();
}
}
}
void OnApplicationPause(bool paused)
{
//Log(LogTag + "ARController.OnApplicationPause(" + paused + ")");
if (paused)
{
if (_running)
{
StopAR();
_runOnUnpause = true;
}
}
else
{
if (_runOnUnpause)
{
StartAR();
_runOnUnpause = false;
}
}
}
void Update()
{
//Log(LogTag + "ARController.Update()");
if (!Application.isPlaying) return; // Editor update.
// Player update.
if (Input.GetKeyDown(KeyCode.Menu) || Input.GetKeyDown(KeyCode.Return)) showGUIDebug = !showGUIDebug;
if (QuitOnEscOrBack && Input.GetKeyDown(KeyCode.Escape)) Application.Quit(); // On Android, maps to "back" button.
CalculateFPS();
UpdateAR();
}
// Called when the user quits the application, or presses stop in the editor.
void OnApplicationQuit()
{
//Log(LogTag + "ARController.OnApplicationQuit()");
StopAR();
}
void OnDisable()
{
Log(LogTag + "ARController.OnDisable()");
if (pluginFunctions.IsInited())
{
FinalizeAR();
}
// Since we might be going away, tell users of our Log function
// to stop calling it.
switch (Application.platform)
{
case RuntimePlatform.OSXEditor:
case RuntimePlatform.OSXPlayer:
goto case RuntimePlatform.WindowsPlayer;
case RuntimePlatform.WindowsEditor:
case RuntimePlatform.WindowsPlayer:
//case RuntimePlatform.LinuxEditor:
case RuntimePlatform.LinuxPlayer:
pluginFunctions.arwRegisterLogCallback(null);
break;
case RuntimePlatform.Android:
break;
case RuntimePlatform.IPhonePlayer:
break;
case RuntimePlatform.WSAPlayerX86:
case RuntimePlatform.WSAPlayerX64:
case RuntimePlatform.WSAPlayerARM:
pluginFunctions.arwRegisterLogCallback(null);
break;
default:
break;
}
pluginFunctions = null;
}
void FinalizeAR()
{
//Log(LogTag + "ARController.FinalizeAR()");
if (_running) {
StopAR();
}
if (pluginFunctions.IsInited()) {
// Ensure ARTrackable objects are all unloaded.
ARTrackable[] trackables = FindObjectsOfType<ARTrackable>();
foreach (ARTrackable t in trackables)
{
t.PluginFunctions = null;
}
Log(LogTag + "Shutting down artoolkitX");
// arwShutdownAR() causes everything artoolkitX holds to be unloaded.
if (!pluginFunctions.arwShutdownAR())
{
Log(LogTag + "Error shutting down artoolkitX.");
}
}
}
    // As OnDestroy() is called from the ARController object's destructor, don't do anything
    // here that assumes that the ARController object is still valid. Do that sort of shutdown
    // in OnDisable() instead.
    void OnDestroy()
    {
        //Log(LogTag + "ARController.OnDestroy()");
        // Classes inheriting from MonoBehavior should set all static member variables to null on unload.
        // None required here: logCallback/logMessages are intentionally left for other users of Log().
        //none.
    }
//
// User-callable AR methods.
//
    /// <summary>
    /// Starts video capture and marker detection. Safe to call before InitializeAR();
    /// the native library is initialised on demand. The remaining Unity scene setup
    /// (video background meshes/cameras) happens later, in UpdateAR(), once the
    /// native side reports that video frames are arriving.
    /// </summary>
    /// <returns>false if already running or a camera-parameter resource is missing;
    /// true otherwise (note: also true when the native start call failed — in that
    /// case _running stays false and an error dialog is flagged).</returns>
    public bool StartAR()
    {
        // Catch attempts to inadvertently call StartAR() twice.
        if (_running)
        {
            Log(LogTag + "WARNING: StartAR() called while already running. Ignoring.\n");
            return false;
        }
        // For late startup after configuration, StartAR needs to ensure InitialiseAR has been called.
        if (!pluginFunctions.IsInited())
        {
            InitializeAR();
        }
        if (pluginFunctions.IsInited())
        {
            Log(LogTag + "Starting AR.");
            _sceneConfiguredForVideo = _sceneConfiguredForVideoWaitingMessageLogged = false;
            // Check rendering device. Native GL texturing is only usable when the
            // renderer is not Direct3D.
            string renderDevice = SystemInfo.graphicsDeviceVersion;
            _useNativeGLTexturing = !renderDevice.StartsWith("Direct") && UseNativeGLTexturingIfAvailable;
            if (_useNativeGLTexturing)
            {
                Log(LogTag + "Render device: " + renderDevice + ", using native GL texturing.");
            }
            else
            {
                Log(LogTag + "Render device: " + renderDevice + ", using Unity texturing.");
            }
            CreateClearCamera();
            // Retrieve video configuration, and append any required per-platform overrides.
            // For native GL texturing we need monoplanar video; iOS and Android default to biplanar format.
            string videoConfiguration0 = arvideoconfig.GetVideoConfigString();
            string videoConfiguration1 = arvideoconfig.GetVideoConfigString(true);
            switch (Application.platform)
            {
                case RuntimePlatform.OSXEditor:
                case RuntimePlatform.OSXPlayer:
                    if (_useNativeGLTexturing || !AllowNonRGBVideo)
                    {
                        if (videoConfiguration0.IndexOf("-device=QuickTime7") != -1 || videoConfiguration0.IndexOf("-device=QUICKTIME") != -1) videoConfiguration0 += " -pixelformat=BGRA";
                        if (videoConfiguration1.IndexOf("-device=QuickTime7") != -1 || videoConfiguration1.IndexOf("-device=QUICKTIME") != -1) videoConfiguration1 += " -pixelformat=BGRA";
                    }
                    break;
                case RuntimePlatform.WindowsEditor:
                case RuntimePlatform.WindowsPlayer:
                    if (_useNativeGLTexturing || !AllowNonRGBVideo)
                    {
                        if (videoConfiguration0.IndexOf("-device=WinMF") != -1) videoConfiguration0 += " -format=BGRA";
                        if (videoConfiguration1.IndexOf("-device=WinMF") != -1) videoConfiguration1 += " -format=BGRA";
                    }
                    break;
                case RuntimePlatform.Android:
                    // Android additionally needs a writable cache directory for the native video module.
                    videoConfiguration0 += " -cachedir=\"" + Application.temporaryCachePath + "\"" + (_useNativeGLTexturing || !AllowNonRGBVideo ? " -format=RGBA" : "");
                    videoConfiguration1 += " -cachedir=\"" + Application.temporaryCachePath + "\"" + (_useNativeGLTexturing || !AllowNonRGBVideo ? " -format=RGBA" : "");
                    break;
                case RuntimePlatform.IPhonePlayer:
                    videoConfiguration0 += (_useNativeGLTexturing || !AllowNonRGBVideo ? " -format=BGRA" : "");
                    videoConfiguration1 += (_useNativeGLTexturing || !AllowNonRGBVideo ? " -format=BGRA" : "");
                    break;
                //case RuntimePlatform.LinuxEditor:
                case RuntimePlatform.LinuxPlayer:
                default:
                    break;
            }
            // Load the default camera parameters (from TextAssets under Resources/ardata/).
            byte[] cparam0 = null;
            byte[] cparam1 = null;
            byte[] transL2R = null;
            if (!string.IsNullOrEmpty(videoCParamName0))
            {
                TextAsset ta = Resources.Load("ardata/" + videoCParamName0, typeof(TextAsset)) as TextAsset;
                if (ta == null)
                {
                    // Error - the camera_para.dat file isn't in the right place
                    Log(LogTag + "StartAR(): Error: Camera parameters file not found at Resources/ardata/" + videoCParamName0 + ".bytes");
                    return (false);
                }
                cparam0 = ta.bytes;
            }
            if (VideoIsStereo)
            {
                // Stereo also needs the right-eye camera parameters and the left-to-right transform.
                if (!string.IsNullOrEmpty(videoCParamName1))
                {
                    TextAsset ta = Resources.Load("ardata/" + videoCParamName1, typeof(TextAsset)) as TextAsset;
                    if (ta == null)
                    {
                        // Error - the camera_para.dat file isn't in the right place
                        Log(LogTag + "StartAR(): Error: Camera parameters file not found at Resources/ardata/" + videoCParamName1 + ".bytes");
                        return (false);
                    }
                    cparam1 = ta.bytes;
                }
                TextAsset ta1 = Resources.Load("ardata/" + transL2RName, typeof(TextAsset)) as TextAsset;
                if (ta1 == null)
                {
                    // Error - the transL2R.dat file isn't in the right place
                    Log(LogTag + "StartAR(): Error: The stereo calibration file not found at Resources/ardata/" + transL2RName + ".bytes");
                    return (false);
                }
                transL2R = ta1.bytes;
            }
            // Begin video capture and marker detection.
            if (!VideoIsStereo)
            {
                Log(LogTag + "Starting artoolkitX video with vconf '" + videoConfiguration0 + "'.");
                _running = pluginFunctions.arwStartRunningB(videoConfiguration0, cparam0, (cparam0 != null ? cparam0.Length : 0));
            }
            else
            {
                Log(LogTag + "Starting artoolkitX video with vconfL '" + videoConfiguration0 + "', vconfR '" + videoConfiguration1 + "'.");
                _running = pluginFunctions.arwStartRunningStereoB(videoConfiguration0, cparam0, (cparam0 != null ? cparam0.Length : 0), videoConfiguration1, cparam1, (cparam1 != null ? cparam1.Length : 0), transL2R, (transL2R != null ? transL2R.Length : 0));
            }
            if (!_running)
            {
                // Map the native error to a user-facing dialog message.
                Log(LogTag + "Error starting running");
                ARW_ERROR error = (ARW_ERROR)pluginFunctions.arwGetError();
                if (error == ARW_ERROR.ARW_ERROR_DEVICE_UNAVAILABLE)
                {
                    showGUIErrorDialogContent = "Unable to start AR tracking. The camera may be in use by another application.";
                }
                else
                {
                    showGUIErrorDialogContent = "Unable to start AR tracking. Please check that you have a camera connected.";
                }
                showGUIErrorDialog = true;
                return false;
            }
            // After calling arwStartRunningB/arwStartRunningStereoB, set artoolkitX configuration.
            // These property setters push the serialized current* values into the native plugin.
            Log(LogTag + "Setting artoolkitX tracking settings.");
            VideoThreshold = currentThreshold;
            VideoThresholdMode = currentThresholdMode;
            LabelingMode = currentLabelingMode;
            BorderSize = currentBorderSize;
            PatternDetectionMode = currentPatternDetectionMode;
            MatrixCodeType = currentMatrixCodeType;
            ImageProcMode = currentImageProcMode;
            NFTMultiMode = currentNFTMultiMode;
            // Prevent display sleep while tracking.
            Screen.sleepTimeout = SleepTimeout.NeverSleep;
        }
        // Remaining Unity setup happens in UpdateAR().
        return true;
    }
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_ANDROID)
    // Last-seen screen orientation, captured in UpdateAR() while configuring the
    // scene for video; mobile-only.
    private ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
#  if UNITY_ANDROID
    // Last-seen screen dimensions, captured in UpdateAR(); Android-only.
    private int screenWidth = 0;
    private int screenHeight = 0;
#  endif
#endif
// Per-frame AR update. Ensures AR has been started, performs one-time Unity scene
// configuration once the native video stream is confirmed running (video background
// mesh(es), background camera(s), viewports), reacts to device rotation/resize on
// mobile, then captures and processes one video frame.
// Returns false only on an unrecoverable native-side failure (video params or
// arwUpdateAR); returns true otherwise, including while still waiting for startup.
bool UpdateAR()
{
// For late startup after configuration, UpdateAR needs to ensure StartAR has been called.
if (!_running)
{
StartAR();
if (!_running) {
// StartAR did not succeed (yet); report success so the caller keeps
// invoking UpdateAR and startup is retried on a subsequent frame.
return true;
}
}
if (!_sceneConfiguredForVideo)
{
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_ANDROID)
// Record the current orientation (and, on Android, screen dimensions) as the
// baseline against which later passes detect rotation/resize.
screenOrientation = Screen.orientation;
#if UNITY_ANDROID
screenWidth = Screen.width;
screenHeight = Screen.height;
#endif
#endif
// Wait for the wrapper to confirm video frames have arrived before configuring our video-dependent stuff.
if (!pluginFunctions.arwIsRunning())
{
// Log the "waiting" message only once, not every frame.
if (!_sceneConfiguredForVideoWaitingMessageLogged)
{
Log(LogTag + "UpdateAR: Waiting for artoolkitX video.");
_sceneConfiguredForVideoWaitingMessageLogged = true;
}
}
else
{
Log(LogTag + "UpdateAR: artoolkitX video is running. Configuring Unity scene for video.");
// Retrieve artoolkitX video source(s) frame size and format, and projection matrix, and store globally.
// Then create the required object(s) to instantiate a mesh/meshes with the frame texture(s).
// Each mesh lives in a separate "video background" layer.
if (!VideoIsStereo)
{
// artoolkitX video size and format.
bool ok1 = pluginFunctions.arwGetVideoParams(out _videoWidth0, out _videoHeight0, out _videoPixelSize0, out _videoPixelFormatString0);
if (!ok1) return false;
Log(LogTag + "Video " + _videoWidth0 + "x" + _videoHeight0 + "@" + _videoPixelSize0 + "Bpp (" + _videoPixelFormatString0 + ")");
// artoolkitX projection matrix adjusted for Unity
float[] projRaw = new float[16];
pluginFunctions.arwGetProjectionMatrix(NearPlane, FarPlane, projRaw);
_videoProjectionMatrix0 = ARUtilityFunctions.MatrixFromFloatArray(projRaw);
Log(LogTag + "Projection matrix: [" + Environment.NewLine + _videoProjectionMatrix0.ToString().Trim() + "]");
// Apply any requested content orientation corrections to the projection
// (rotation is applied before the flips; each is a left-multiplied transform).
if (ContentRotate90) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.AngleAxis(90.0f, Vector3.back), Vector3.one) * _videoProjectionMatrix0;
if (ContentFlipV) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1.0f, -1.0f, 1.0f)) * _videoProjectionMatrix0;
if (ContentFlipH) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(-1.0f, 1.0f, 1.0f)) * _videoProjectionMatrix0;
_videoBackgroundMeshGO0 = CreateVideoBackgroundMesh(0, _videoWidth0, _videoHeight0, BackgroundLayer0, out _videoColorArray0, out _videoColor32Array0, out _videoTexture0, out _videoMaterial0);
if (_videoBackgroundMeshGO0 == null || _videoTexture0 == null || _videoMaterial0 == null)
{
// NOTE(review): failure is logged but configuration continues; presumably
// later null checks guard against the missing mesh — confirm.
Log(LogTag + "Error: unable to create video mesh.");
}
}
else
{
// artoolkitX stereo video size and format.
bool ok1 = pluginFunctions.arwGetVideoParamsStereo(out _videoWidth0, out _videoHeight0, out _videoPixelSize0, out _videoPixelFormatString0, out _videoWidth1, out _videoHeight1, out _videoPixelSize1, out _videoPixelFormatString1);
if (!ok1) return false;
Log(LogTag + "Video left " + _videoWidth0 + "x" + _videoHeight0 + "@" + _videoPixelSize0 + "Bpp (" + _videoPixelFormatString0 + "), right " + _videoWidth1 + "x" + _videoHeight1 + "@" + _videoPixelSize1 + "Bpp (" + _videoPixelFormatString1 + ")");
// artoolkitX projection matrices, adjusted for Unity
float[] projRaw0 = new float[16];
float[] projRaw1 = new float[16];
pluginFunctions.arwGetProjectionMatrixStereo(NearPlane, FarPlane, projRaw0, projRaw1);
_videoProjectionMatrix0 = ARUtilityFunctions.MatrixFromFloatArray(projRaw0);
_videoProjectionMatrix1 = ARUtilityFunctions.MatrixFromFloatArray(projRaw1);
Log(LogTag + "Projection matrix left: [" + Environment.NewLine + _videoProjectionMatrix0.ToString().Trim() + "], right: [" + Environment.NewLine + _videoProjectionMatrix1.ToString().Trim() + "]");
// Same orientation corrections as the mono path, applied to both eyes.
if (ContentRotate90) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.AngleAxis(90.0f, Vector3.back), Vector3.one) * _videoProjectionMatrix0;
if (ContentRotate90) _videoProjectionMatrix1 = Matrix4x4.TRS(Vector3.zero, Quaternion.AngleAxis(90.0f, Vector3.back), Vector3.one) * _videoProjectionMatrix1;
if (ContentFlipV) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1.0f, -1.0f, 1.0f)) * _videoProjectionMatrix0;
if (ContentFlipV) _videoProjectionMatrix1 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1.0f, -1.0f, 1.0f)) * _videoProjectionMatrix1;
if (ContentFlipH) _videoProjectionMatrix0 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(-1.0f, 1.0f, 1.0f)) * _videoProjectionMatrix0;
if (ContentFlipH) _videoProjectionMatrix1 = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(-1.0f, 1.0f, 1.0f)) * _videoProjectionMatrix1;
_videoBackgroundMeshGO0 = CreateVideoBackgroundMesh(0, _videoWidth0, _videoHeight0, BackgroundLayer0, out _videoColorArray0, out _videoColor32Array0, out _videoTexture0, out _videoMaterial0);
_videoBackgroundMeshGO1 = CreateVideoBackgroundMesh(1, _videoWidth1, _videoHeight1, BackgroundLayer1, out _videoColorArray1, out _videoColor32Array1, out _videoTexture1, out _videoMaterial1);
if (_videoBackgroundMeshGO0 == null || _videoTexture0 == null || _videoMaterial0 == null || _videoBackgroundMeshGO1 == null || _videoTexture1 == null || _videoMaterial1 == null)
{
Log(LogTag + "Error: unable to create video background mesh.");
}
}
// Create background camera(s) to actually view the "video background" layer(s).
// Stereo display mode is determined by whether any ARCamera in the scene is stereo.
bool haveStereoARCameras = false;
ARCamera[] arCameras = FindObjectsOfType(typeof(ARCamera)) as ARCamera[];
foreach (ARCamera arc in arCameras)
{
if (arc.Stereo) haveStereoARCameras = true;
}
if (!haveStereoARCameras)
{
// Mono display.
// Use only first video source, regardless of whether VideoIsStereo.
// (The case where stereo video source is used with a mono display is not likely to be common.)
_videoBackgroundCameraGO0 = CreateVideoBackgroundCamera("Video background", BackgroundLayer0, out _videoBackgroundCamera0);
if (_videoBackgroundCameraGO0 == null || _videoBackgroundCamera0 == null)
{
Log(LogTag + "Error: unable to create video background camera.");
}
}
else
{
// Stereo display.
// If not VideoIsStereo, right eye will display copy of video frame.
_videoBackgroundCameraGO0 = CreateVideoBackgroundCamera("Video background (L)", BackgroundLayer0, out _videoBackgroundCamera0);
_videoBackgroundCameraGO1 = CreateVideoBackgroundCamera("Video background (R)", (VideoIsStereo ? BackgroundLayer1 : BackgroundLayer0), out _videoBackgroundCamera1);
if (_videoBackgroundCameraGO0 == null || _videoBackgroundCamera0 == null || _videoBackgroundCameraGO1 == null || _videoBackgroundCamera1 == null)
{
Log(LogTag + "Error: unable to create video background camera.");
}
}
// Setup foreground cameras for the video configuration.
ConfigureForegroundCameras();
// Adjust viewports of both background and foreground cameras.
ConfigureViewports();
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_ANDROID)
// Orient the video texture for the current device orientation.
UpdateVideoTexture();
#endif
Log(LogTag + "Scene configured for video.");
_sceneConfiguredForVideo = true;
} // else (arwIsRunning: video running, scene now configured)
} // !_sceneConfiguredForVideo
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_ANDROID)
# if UNITY_IOS
// iOS: a change in orientation alone triggers a video texture update.
if (Screen.orientation != screenOrientation) {
UpdateVideoTexture();
}
# elif UNITY_ANDROID
// Android: react to a size change first; on an orientation change alone, zero the
// cached dimensions so the size check fires (and updates the texture) on the next pass.
if ((Screen.width != screenWidth) || (Screen.height != screenHeight)) {
UpdateVideoTexture();
} else if (Screen.orientation != screenOrientation) {
screenWidth = screenHeight = 0; // Force video texture update on next pass.
}
# endif
#endif
// Capture one video frame; if one arrived, run native tracking and (optionally)
// push the frame pixels into the background texture(s).
bool gotFrame = pluginFunctions.arwCapture();
if (gotFrame)
{
if (!pluginFunctions.arwUpdateAR()) return false;
if (_sceneConfiguredForVideo && UseVideoBackground)
{
UpdateTexture();
}
}
return true;
}
// Stops AR tracking and video capture, tears down the Unity-side video objects,
// and re-enables display sleep.
// Returns false if AR was not running; true once it has been stopped.
public bool StopAR()
{
    // Nothing to stop unless tracking is currently active.
    if (!_running) return false;

    Log(LogTag + "Stopping AR.");

    // Ask the native plugin to halt video capture and marker detection.
    bool stoppedOK = pluginFunctions.arwStopRunning();
    if (!stoppedOK)
    {
        Log(LogTag + "Error stopping AR.");
    }

    // Tear down the video background mesh(es)/camera(s) and the clear camera.
    DestroyVideoBackground();
    DestroyClearCamera();

    // Allow the display to sleep again now that AR is no longer running.
    Screen.sleepTimeout = SleepTimeout.SystemSetting;

    _running = false;
    return true;
}
//
// User-callable configuration methods.
//
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_ANDROID)
public void UpdateVideoTexture()
{
ScreenOrientation screenOrientation = Screen.orientation;
# if UNITY_ANDROID
screenWidth = Screen.width;
screenHeight = Screen.height;
# endif
Matrix4x4 deviceRotation;
int height = _videoHeight0;
int width = _videoWidth0;
switch (screenOrientation) {
case ScreenOrientation.Portrait:
Log(LogTag + "ScreenOrientation.Portrait");