// ==== AnchorSample.cs ====

#if URS_USE_AR_FOUNDATION
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace Unity.RenderStreaming.Samples
{
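    // Receives the remote-rendered video stream, decodes the frame timestamp that
    // the remote peer encodes into the video pixels, and uses it to look up the
    // matching locally captured AR camera frame stored by CameraFrame.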
    public class AnchorSample : MonoBehaviour
    {
        [SerializeField] private RenderStreaming renderStreaming;
        [SerializeField] private Button startButton;
        [SerializeField] private Button stopButton;
        [SerializeField] private RawImage remoteVideoImage_Alpha;
        [SerializeField] private VideoStreamReceiver receiveVideoViewer;
        [SerializeField] private SingleConnection connection;
        [SerializeField] private DataSender dataSender;
        [SerializeField] private Text CheckDecodeTimestampText;
        [SerializeField] private Text CheckTsStringText;
        [SerializeField] private CameraFrame cameraFrame;
        [SerializeField] private Text CoroutineStatus;
        [SerializeField] RawImage m_RawCameraImage;
        [SerializeField] Text FPS;

        private long tsLong;
        private string _connectionId;
        private IEnumerator StartCheckTimestampInFrame = null;
        private bool CheckTimestampStatus;
        private bool requestFlag = true;
        private Texture remoteVideoImage;
        float _time = 0;
        int _counter = 0;
        private long tsLongBefore = 0;
        private bool isCoroutineRunning = false;
        private float avgFrameRate;

        public RawImage rawCameraImage
        {
            get => m_RawCameraImage;
            set => m_RawCameraImage = value;
        }
        void Start()
        {
            if (!renderStreaming.runOnAwake)
            {
                renderStreaming.Run(
                    hardwareEncoder: RenderStreamingSettings.EnableHWCodec,
                    signaling: RenderStreamingSettings.Signaling);
            }
        }

        void Awake()
        {
            startButton.onClick.AddListener(CreateConnection);
            stopButton.onClick.AddListener(DeleteConnection);
            m_RawCameraImage.gameObject.SetActive(false);
            startButton.gameObject.SetActive(true);
            stopButton.gameObject.SetActive(false);
            // Hide the remote-video RawImage at startup.
            remoteVideoImage_Alpha.gameObject.SetActive(false);
            //receiveVideoViewer.OnUpdateReceiveTexture += texture => remoteVideoImage_Alpha.texture = texture;
            dataSender.OnNewStreamSenderAdded += OnNewStreamSenderAdded;
            receiveVideoViewer.OnUpdateReceiveTexture += MyFunc;
        }
        public void CreateConnection()
        {
            _connectionId = System.Guid.NewGuid().ToString("N");
            connection.CreateConnection(_connectionId);
            startButton.gameObject.SetActive(false);
            stopButton.gameObject.SetActive(true);
            // Show the remote-video RawImage when the start button is pressed.
            remoteVideoImage_Alpha.gameObject.SetActive(true);
            // Match the RawImage size to the device screen.
            remoteVideoImage_Alpha.GetComponent<RectTransform>().sizeDelta = new Vector2(Display.main.systemWidth, Display.main.systemHeight);
            //remoteVideoImage_Alpha.GetComponent<RectTransform>().sizeDelta = new Vector2(1024, 768);
        }
        void DeleteConnection()
        {
            connection.DeleteConnection(_connectionId);
            _connectionId = null;
            startButton.gameObject.SetActive(true);
            stopButton.gameObject.SetActive(false);
            // Hide the remote-video RawImage when the stop button is pressed.
            remoteVideoImage_Alpha.gameObject.SetActive(false);
        }

        private void OnNewStreamSenderAdded()
        {
            Debug.Log("ARFoundationSample OnNewStreamSenderAdded");
            this.DeleteConnection();
            this.CreateConnection();
        }
        private void MyFunc(Texture texture)
        {
            if (texture == null)
            {
                CheckTimestampStatus = false;
                Debug.LogError("No texture");
            }
            else
            {
                CheckTimestampStatus = true;
                Debug.Log("Has texture");
                remoteVideoImage = texture;
                if (!isCoroutineRunning)
                {
                    Debug.Log("isInStartCoroutine");
                    // Mark the coroutine as running before starting it so a texture
                    // update arriving on the next frame cannot start a duplicate.
                    isCoroutineRunning = true;
                    StartCheckTimestampInFrame = CheckTimestampInFrame();
                    StartCoroutine(StartCheckTimestampInFrame);
                }
            }
            Debug.Log($"Now flag status: {CheckTimestampStatus}");
        }
        //private Texture2D receiveTexture2D = null;
        private Texture2D copy_remoteVideoImage = null;
        //private Texture2D copy_CorrectCameraFrame = null;
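        // Readback pipeline: each frame the received texture is copied into a
        // CPU-readable Texture2D, then handed to AsyncGPUReadback so the pixel data
        // can be inspected without stalling the render thread. requestFlag keeps at
        // most one readback in flight; OnCompleteReadback re-arms it when data lands.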
        IEnumerator CheckTimestampInFrame()
        {
            Debug.Log("isInCoroutine");
            CoroutineStatus.text = "CoroutineStatus: " + isCoroutineRunning.ToString();
            while (true)
            {
                System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
                avgFrameRate = Time.frameCount / Time.time;
                FPS.text = "FPS: " + avgFrameRate.ToString();
                Debug.Log($"========\nCoroutine FPS :{avgFrameRate}\n========");
                Debug.Log($"remoteImageSize: {remoteVideoImage.width}x{remoteVideoImage.height}");
                // We should only read the frame after rendering is complete.
                yield return new WaitForEndOfFrame();
                if (copy_remoteVideoImage == null)
                {
                    copy_remoteVideoImage = new Texture2D(remoteVideoImage.width, remoteVideoImage.height, TextureFormat.ARGB32, false);
                }
                sw.Start();
                Graphics.CopyTexture(remoteVideoImage, copy_remoteVideoImage);
                sw.Stop();
                Debug.Log($"====================================\n CopyTextureTime {sw.ElapsedMilliseconds} ms");
                sw.Reset();
                sw.Start();
                Debug.Log($"requestFlag: {requestFlag}");
                if (requestFlag)
                {
                    requestFlag = false;
                    AsyncGPUReadback.Request(copy_remoteVideoImage, 0, TextureFormat.RGBA32, OnCompleteReadback);
                }
                sw.Stop();
                // Measures only the cost of issuing the request; the readback itself
                // completes asynchronously in OnCompleteReadback.
                Debug.Log($"RequestReadbackTime {sw.ElapsedMilliseconds} ms");
                sw.Reset();
                yield return null;
            }
        }
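        // The remote peer encodes its capture timestamp as 64 binary digits along one
        // edge of the frame: each digit occupies a dataWidth-pixel-wide block within
        // the rightmost 64 * dataWidth pixels, with a bright red channel (> 128) read
        // as 1 and a dark one as 0. Sampling the middle pixel of each block recovers
        // the bit string, which is then parsed back into the original long timestamp.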
        void OnCompleteReadback(AsyncGPUReadbackRequest request)
        {
            if (request.hasError)
            {
                Debug.LogError("GPU readback error detected.");
                return;
            }
            int totalHeight = 5 * 32;
            int dataWidth = 5;
            int totalWidth = dataWidth * 64;
            string tsString = "";
            int targetRow = totalHeight / 2;
            int targetCol = copy_remoteVideoImage.width - totalWidth + dataWidth / 2;
            int startPixel = 0 + dataWidth / 2;
            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            //var colorArray = request.GetData<Color32>().ToArray();
            var colorArray = request.GetData<Color32>().GetSubArray(targetCol, totalWidth);
            sw.Stop();
            Debug.Log($"GetDataTime {sw.ElapsedMilliseconds} ms");
            sw.Reset();
            sw.Start();
            // Sample the middle pixel of each dataWidth-wide block; red > 128 is a 1 bit.
            for (int x = startPixel; x < colorArray.Length; x += dataWidth)
            {
                if (colorArray[x].r > 128)
                {
                    tsString += "1";
                }
                else
                {
                    tsString += "0";
                }
            }
            Debug.Log($"tsString: {tsString}");
            tsLong = Convert.ToInt64(tsString, 2);
            Debug.Log("tsLong: " + tsLong + " tsString: " + tsString + " width: " + remoteVideoImage.width + " height: " + remoteVideoImage.height + " targetRow: " + targetRow);
            sw.Stop();
            Debug.Log($"DecodeTimestampTime {sw.ElapsedMilliseconds} ms");
            sw.Start();
            //CheckTsStringText.text = tsString;
            CheckDecodeTimestampText.text = tsLong.ToString();
            _time += Time.deltaTime;
            if (tsLongBefore != tsLong)
            {
                _counter += 1;
                if (_time >= 1f) // one second
                {
                    _time = 0;
                    Debug.Log("OneSecond");
                    Debug.Log($"Count: {_counter}");
                    _counter = 0;
                }
                Texture2D CorrectCameraFrame = cameraFrame.TakeOutCameraFrame(tsLong);
                if (CorrectCameraFrame != null)
                {
                    Debug.Log("Found_Camera_Frame");
                    // Feed the remote frame and the matching local camera frame into
                    // the compositing material.
                    remoteVideoImage_Alpha.material.SetTexture("_MainTex", copy_remoteVideoImage);
                    remoteVideoImage_Alpha.material.SetTexture("_CameraFrame", CorrectCameraFrame);
                    remoteVideoImage_Alpha.texture = remoteVideoImage_Alpha.material.GetTexture("_MainTex");
                    Resources.UnloadUnusedAssets(); // unload unused assets
                    System.GC.Collect();            // release managed memory
                    cameraFrame.CleanDictionary(tsLong);
                    Debug.Log("FinishCleanCameraFrame");
                }
                else
                {
                    Debug.LogWarning("No corresponding camera image found.");
                }
            }
            else
            {
                Debug.LogWarning("Timestamp repeat.");
            }
            tsLongBefore = tsLong;
            sw.Stop();
            Debug.Log($"ShaderProcessTime {sw.ElapsedMilliseconds} ms \n ====================================");
            sw.Reset();
            // Allow the coroutine to issue the next readback request.
            requestFlag = true;
        }
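
        // For reference, a minimal sketch of the sender-side encoding that the
        // decoder above assumes. This method is illustrative only: it is not part of
        // the original sample, the real encoder runs on the remote peer, and the
        // method name, the row written, and the block colors are assumptions.
        private static void EncodeTimestampIntoFrame(Texture2D frame, long timestamp, int dataWidth = 5)
        {
            // 64-character big-endian bit string, e.g. 42 -> "0...0101010".
            string bits = Convert.ToString(timestamp, 2).PadLeft(64, '0');
            int origin = frame.width - 64 * dataWidth;
            for (int bit = 0; bit < 64; bit++)
            {
                // White block = 1, black block = 0; the decoder samples the red channel.
                Color32 c = bits[bit] == '1' ? new Color32(255, 255, 255, 255) : new Color32(0, 0, 0, 255);
                for (int dx = 0; dx < dataWidth; dx++)
                {
                    frame.SetPixel(origin + bit * dataWidth + dx, 0, c);
                }
            }
            frame.Apply();
        }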
    }
}
#endif

// ==== CameraFrame.cs ====

using System;
using System.Collections;
using System.Collections.Generic;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace Unity.RenderStreaming.Samples
{
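    // Captures the device camera image each frame, tags it with a Unix-epoch
    // millisecond timestamp, and keeps a short history of frames in a dictionary so
    // AnchorSample can retrieve the frame that matches a decoded remote timestamp.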
    public class CameraFrame : MonoBehaviour
    {
        Dictionary<long, Texture2D> Frame_dict = new Dictionary<long, Texture2D>();
        private int FrameCount = 0;
        private float avgFrameRate;
        [SerializeField] Text framecount;
        //[SerializeField] Text FPS;
        [SerializeField] private ARSession session;
        [SerializeField] Text FindCameraFrameText;
        [SerializeField] Text CheckPeerTimestamp;
        [SerializeField] Text DictionaryHasTimestamp;
        [SerializeField] PlaceOnPlane placeOnPlane;
        [SerializeField] ARCameraManager m_CameraManager;

        public ARCameraManager cameraManager
        {
            get => m_CameraManager;
            set => m_CameraManager = value;
        }

        public int? frameRate { get; }
        public bool matchFrameRateRequested { get; set; }

        void OnBeforeRender() => UpdateCameraImage();

        private void OnEnable()
        {
            //m_CameraManager.frameReceived += OnFrameReceived;
            if (session.matchFrameRateRequested)
            {
                Application.onBeforeRender += OnBeforeRender;
            }
        }

        private void OnDisable()
        {
            //m_CameraManager.frameReceived -= OnFrameReceived;
            if (session.matchFrameRateRequested)
            {
                Application.onBeforeRender -= OnBeforeRender;
            }
        }
        unsafe void UpdateCameraImage()
        {
            // Attempt to get the latest camera image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            if (!cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
            {
                return;
            }

            // Once we have a valid XRCpuImage, we can access the individual image
            // "planes" (the separate channels in the image). XRCpuImage.GetPlane
            // provides low-overhead access to this data. Here, we convert the camera
            // image to an RGBA texture. See XRCpuImage.FormatSupported for a complete
            // list of supported formats.
            var format = TextureFormat.RGBA32;
            if (m_CameraTexture == null || m_CameraTexture.width != image.width || m_CameraTexture.height != image.height)
            {
                m_CameraTexture = new Texture2D(image.width, image.height, format, false);
            }

            // Convert the image to the chosen format, mirroring it according to
            // m_Transformation. A sub-rectangle could be converted instead, but the
            // full image is used here.
            var conversionParams = new XRCpuImage.ConversionParams(image, format, m_Transformation);

            // Texture2D lets us write directly to the raw texture data, so the
            // conversion happens in place without extra copies.
            var rawTextureData = m_CameraTexture.GetRawTextureData<byte>();
            try
            {
                image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
                //FPS.text = session.frameRate.ToString();
                // Milliseconds since the Unix epoch (621355968000000000 ticks is
                // 1970-01-01T00:00:00Z; one tick is 100 ns, so dividing by 10000 gives ms).
                long ts = (DateTime.Now.ToUniversalTime().Ticks - 621355968000000000) / 10000;
                placeOnPlane.getParam(ts);
                // Store a copy, since m_CameraTexture is overwritten every frame.
                Texture2D CameraTexture_copy = new Texture2D(image.width, image.height, format, false);
                CameraTexture_copy.SetPixels(m_CameraTexture.GetPixels());
                CameraTexture_copy.Apply();
                StoredCameraFrame(ts, CameraTexture_copy);
                placeOnPlane.SendJsonDataToServer();
            }
            finally
            {
                // We must dispose of the XRCpuImage after we're finished
                // with it to avoid leaking native resources.
                image.Dispose();
            }

            // Apply the updated texture data to our texture.
            m_CameraTexture.Apply();
            // m_RawCameraImage.texture = m_CameraTexture;
        }
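        // Frames are buffered in Frame_dict keyed by capture timestamp. The buffer is
        // capped at roughly 60 entries (about one second of frames at 60 fps); once it
        // fills, every stored texture is destroyed and the dictionary flushed wholesale.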
        private void StoredCameraFrame(long timestamp, Texture2D cameraframe)
        {
            framecount.text = FrameCount.ToString();
            Debug.Log($"FrameDictionaryCount: {Frame_dict.Count}");
            if (Frame_dict.Count <= 60)
            {
                Frame_dict.Add(timestamp, cameraframe);
                FrameCount += 1;
            }
            else
            {
                // Destroy every buffered texture before clearing; note that the
                // incoming frame is dropped as well on this path.
                foreach (KeyValuePair<long, Texture2D> item in Frame_dict)
                {
                    Destroy(item.Value);
                }
                Frame_dict.Clear();
                FrameCount = 0;
                Resources.UnloadUnusedAssets(); // unload unused assets
                System.GC.Collect();            // release managed memory
            }
        }
        // Look up the stored camera frame that matches the given timestamp.
        public Texture2D TakeOutCameraFrame(long timestamp)
        {
            Texture2D CorrectCameraFrame;
            if (Frame_dict.ContainsKey(timestamp))
            {
                FindCameraFrameText.text = "Find Peer Camera Frame";
                DictionaryHasTimestamp.text = Frame_dict.ContainsKey(timestamp).ToString();
                CheckPeerTimestamp.text = timestamp.ToString();
                CorrectCameraFrame = Frame_dict[timestamp];
            }
            else
            {
                Debug.LogError("Cannot find camera frame in TakeOutCameraFrame.");
                CorrectCameraFrame = null;
            }
            return CorrectCameraFrame;
        }
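        // Once a frame has been matched, entries older than its timestamp can no
        // longer be matched (the decoded timestamps only move forward), so evict them.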
        public void CleanDictionary(long timestamp)
        {
            List<long> itemsToRemove = new List<long>();
            foreach (KeyValuePair<long, Texture2D> item in Frame_dict)
            {
                if (item.Key < timestamp)
                {
                    itemsToRemove.Add(item.Key);
                }
            }
            Debug.Log($"Remove items count: {itemsToRemove.Count}");
            for (int i = 0; i < itemsToRemove.Count; i++)
            {
                Destroy(Frame_dict[itemsToRemove[i]]);
                Frame_dict.Remove(itemsToRemove[i]);
            }
            string itemDisplay = "items: ";
            foreach (KeyValuePair<long, Texture2D> item in Frame_dict)
            {
                itemDisplay += item.Key + ", ";
            }
            Debug.Log(itemDisplay);
            Debug.Log($"Remaining amount: {Frame_dict.Count}");
        }
        XRCpuImage.Transformation m_Transformation = XRCpuImage.Transformation.MirrorX;

        private void OnFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            avgFrameRate = Time.frameCount / Time.time;
            Debug.Log($"========\n OnFrameReceived FPS :{avgFrameRate}\n========");
            UpdateCameraImage();
        }

        Texture2D m_CameraTexture;
    }
}

// ==== PlaceOnPlane.cs ====

using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
// using UnityEngine.InputSystem;
// using UnityEngine.InputSystem.Controls;
using SimpleJSON;

namespace Unity.RenderStreaming.Samples
{
    /// <summary>
    /// Listens for touch events and performs an AR raycast from the screen touch
    /// point. AR raycasts only hit detected trackables such as feature points and
    /// planes. If a raycast hits a trackable, the <see cref="placedPrefab"/> is
    /// instantiated and moved to the hit position. The attached Light receives the
    /// most recently estimated color correction for the physical environment.
    /// </summary>
    [RequireComponent(typeof(ARRaycastManager))]
    [RequireComponent(typeof(Light))]
    [RequireComponent(typeof(ARAnchorManager))]
    public class PlaceOnPlane : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("Instantiates this prefab on a plane at the touch location.")]
        GameObject m_PlacedPrefab;
        [SerializeField] private ARSession session;
        [SerializeField]
        [Tooltip("The ARCameraManager which will produce frame events containing light estimation information.")]
        ARCameraManager _ARcameraManager;
        [SerializeField] private AnchorSample anchorSample;
        [SerializeField] private DataSender dataSender;
        [SerializeField] private Text debugText;
        [SerializeField] private Text TouchStatus;

        /// <summary>
        /// The prefab to instantiate on touch.
        /// </summary>
        public GameObject placedPrefab
        {
            get { return m_PlacedPrefab; }
            set { m_PlacedPrefab = value; }
        }

        /// <summary>
        /// The estimated color correction value of the physical environment, if available.
        /// </summary>
        public Color? colorCorrection { get; private set; }

        /// <summary>
        /// The object instantiated as a result of a successful raycast intersection with a plane.
        /// </summary>
        public GameObject spawnedObject { get; private set; }

        public Matrix4x4? projectionMatrix { get; set; }
        public Matrix4x4? modelMatrix { get; set; }
        public Matrix4x4? viewMatrix { get; set; }
        public Matrix4x4? modelViewMatrix { get; set; }
        public long? timestampNs { get; set; }

        JSONObject Data_JSON = new JSONObject();
        /// <summary>
        /// Get or set the <c>ARCameraManager</c>.
        /// </summary>
        public ARCameraManager cameraManager
        {
            get { return _ARcameraManager; }
            set
            {
                if (_ARcameraManager == value)
                    return;
                if (_ARcameraManager != null)
                    _ARcameraManager.frameReceived -= OnFrameReceived;
                _ARcameraManager = value;
                if (_ARcameraManager != null && enabled)
                    _ARcameraManager.frameReceived += OnFrameReceived;
            }
        }
        public GameObject arCameraObject;
        //private IEnumerator startSendParam = null;

        void Awake()
        {
            m_RaycastManager = GetComponent<ARRaycastManager>();
            m_Light = GetComponent<Light>();
        }
        private void OnEnable()
        {
            if (_ARcameraManager != null)
                _ARcameraManager.frameReceived += OnFrameReceived;
        }

        private void OnDisable()
        {
            if (_ARcameraManager != null)
                _ARcameraManager.frameReceived -= OnFrameReceived;
        }
        private void OnFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            if (eventArgs.projectionMatrix.HasValue)
            {
                projectionMatrix = eventArgs.projectionMatrix.Value;
            }

            // Use the estimated color correction if available; otherwise fall back
            // to neutral white.
            if (eventArgs.lightEstimation.colorCorrection.HasValue)
            {
                colorCorrection = eventArgs.lightEstimation.colorCorrection.Value;
                m_Light.color = colorCorrection.Value;
            }
            else
            {
                colorCorrection = new Color(1f, 1f, 1f, 1f);
            }

            if (eventArgs.timestampNs.HasValue)
            {
                timestampNs = eventArgs.timestampNs.Value;
            }
        }
        IEnumerator Start()
        {
            if ((ARSession.state == ARSessionState.None) ||
                (ARSession.state == ARSessionState.CheckingAvailability))
            {
                yield return ARSession.CheckAvailability();
            }

            if (ARSession.state == ARSessionState.Unsupported)
            {
                // Start some fallback experience for unsupported devices.
                Debug.LogError("AR Foundation is not supported on this device.");
            }
            else
            {
                // Start the AR session.
                session.enabled = true;
            }
        }
        bool TryGetTouchPosition(out Vector2 touchPosition)
        {
            if (Input.touchCount > 0)
            {
                touchPosition = Input.GetTouch(0).position;
                return true;
            }
            touchPosition = default;
            return false;
        }
        void Update()
        {
            // Touches are reported on every frame while a finger is down, so gate on
            // TouchPhase.Began to react only once per tap.
            if (Input.touchCount == 1)
            {
                var touch = Input.GetTouch(0);
                if (touch.phase != TouchPhase.Began)
                    return;
                if (!TryGetTouchPosition(out Vector2 touchPosition))
                    return;
                // If the user hit a plane, place the model and log its model matrix.
                if (m_RaycastManager.Raycast(touchPosition, s_Hits, TrackableType.PlaneWithinPolygon))
                {
                    // Raycast hits are sorted by distance, so the first one
                    // is the closest hit.
                    var hitPose = s_Hits[0].pose;
                    if (spawnedObject == null)
                    {
                        spawnedObject = Instantiate(m_PlacedPrefab, hitPose.position, hitPose.rotation);
                        Matrix4x4 m = Matrix4x4.TRS(hitPose.position, hitPose.rotation, new Vector3(1, 1, 1));
                        Debug.Log($"Prefab model matrix: {m}");
                        TouchStatus.text = "first touch.";
                        anchorSample.CreateConnection();
                        //startSendParam = getParam();
                        //StartCoroutine(startSendParam);
                    }
                    else
                    {
                        // Relocating the placed object on later touches is disabled.
                        //spawnedObject.transform.position = hitPose.position;
                        TouchStatus.text = "second touch.";
                        //StopCoroutine(startSendParam);
                        //StartCoroutine(startSendParam);
                    }
                }
            }
        }
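        // The view matrix below is the inverse of the AR camera's TRS with the Z
        // scale negated; the sign flip presumably converts between Unity's
        // left-handed convention and the right-handed convention expected by the
        // remote renderer.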
        // Collect the parameters required by the remote renderer and pack them into
        // Data_JSON; SendJsonDataToServer() transmits the result.
        public void getParam(long CameraTimestamp)
        {
            // This is called on every camera frame, including before the first touch,
            // so bail out until a prefab has been placed and frame data is available.
            if (spawnedObject == null || !projectionMatrix.HasValue || !colorCorrection.HasValue)
                return;

            // Calculate the modelview matrix.
            modelMatrix = spawnedObject.transform.localToWorldMatrix;
            //viewMatrix = arCameraObject.transform.localToWorldMatrix;
            //viewMatrix = Matrix4x4.TRS(arCameraObject.transform.position, arCameraObject.transform.rotation, new Vector3(1, 1, -1));
            viewMatrix = Matrix4x4.Inverse(Matrix4x4.TRS(arCameraObject.transform.position, arCameraObject.transform.rotation, new Vector3(1, 1, -1)));
            modelViewMatrix = viewMatrix * modelMatrix;

            // Add timestamp and type.
            Data_JSON.Add("timestamp", CameraTimestamp.ToString());
            Data_JSON.Add("type", "ARCore");

            // Add color correction.
            JSONArray ColorCorrectionRgba = new JSONArray();
            for (int i = 0; i < 4; i++)
            {
                ColorCorrectionRgba.Add(colorCorrection.Value[i]);
            }

            // Add the modelview matrix and projection matrix in column-major order.
            JSONArray ModelViewMatrix = new JSONArray();
            JSONArray ProjectionMatrix = new JSONArray();
            for (int i = 0; i < 16; i++)
            {
                ModelViewMatrix.Add(modelViewMatrix.Value[i]);
                ProjectionMatrix.Add(projectionMatrix.Value[i]);
            }

            // Add the viewport size.
            JSONArray ViewPort = new JSONArray();
            ViewPort.Add(Display.main.systemWidth);
            ViewPort.Add(Display.main.systemHeight);

            // Encode color correction, modelview matrix, projection matrix and
            // viewport into the body.
            var body = new JSONObject();
            body["colorCorrectionRgba"] = ColorCorrectionRgba;
            body["modelViewMatrix"] = ModelViewMatrix;
            body["projectionMatrix"] = ProjectionMatrix;
            body["viewport"] = ViewPort;
            Data_JSON.Add("body", body);
            debugText.text = Data_JSON.ToString();
        }
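        // For reference, the resulting payload has roughly this shape (values are
        // illustrative; the field names come from the code above):
        // {
        //   "timestamp": "1652900000000",
        //   "type": "ARCore",
        //   "body": {
        //     "colorCorrectionRgba": [1.0, 1.0, 1.0, 1.0],
        //     "modelViewMatrix": [ /* 16 floats, column-major */ ],
        //     "projectionMatrix": [ /* 16 floats, column-major */ ],
        //     "viewport": [1080, 2340]
        //   }
        // }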
        public void SendJsonDataToServer()
        {
            //Debug.Log($"Global JsonData: {Data_JSON.ToString()}");
            dataSender.StartSendData(Data_JSON.ToString());
        }

        static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();
        Light m_Light;
        ARRaycastManager m_RaycastManager;
    }
}