@jryebread
Created August 8, 2018 22:18
import os
os.environ['PYTHONASYNCIODEBUG'] = '1'
import asyncio
import logging
import base64
import cv2
import numpy as np
import scipy
import scipy.misc
import matplotlib.pyplot as plt
import VideoGradientTest
from PIL import Image

logging.basicConfig(level=logging.ERROR)
async def tcp_echo_client(data, loop):
    reader, writer = await asyncio.open_connection('192.168.1.117', 8080, loop=loop)
    print('Sending data of size: %r' % str(len(data)))

    # Send the payload size (as ASCII digits) followed by the base64 data.
    # Note: str() on the bytes object includes the b'' wrapper, which the
    # HoloLens receiver strips on its side.
    print("Sending data: " + str(len(data)) + str(data))
    writer.write(str(len(data)).encode() + str(data).encode())
    await writer.drain()

    #print("Message: %r" % (data))
    print(len(data))
    print('Close the socket')
    writer.write_eof()
    #writer.close()
def grab_frame(cap):
    ret, frame = cap.read()
    width = cap.get(3)   # float
    height = cap.get(4)  # float
    gMagImg = VideoGradientTest.getWindowedGradientMagnitude(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB), int(width), int(height))
    FrameTempTest = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    return gMagImg[0:650, 0:1200]
# START HERE
# Initiate the camera
cap = cv2.VideoCapture(0)
cap.set(3, 1280)
cap.set(4, 720)
width = cap.get(3)
height = cap.get(4)
print(width)
print(height)
# create a subplot
fig = plt.figure(frameon=False)
plt.axis("off")
fig.set_size_inches(4, 3)
# make the content fit the whole figure
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
# continually grab frames and send them
while cv2.waitKey(1) & 0xFF != ord('q'):
    frame = grab_frame(cap)

    # draw the frame to the plot
    #imPlot.set_data(frame) # PLOT THE BITS DATA
    plt.contourf(frame)  # take the contour of the image data
    plt.savefig('foo.jpg', bbox_inches='tight', transparent=True, pad_inches=0)

    with open("foo.jpg", "rb") as imageFile:
        data = base64.b64encode(imageFile.read())

    loop = asyncio.get_event_loop()
    loop.run_until_complete(tcp_echo_client(data, loop))

plt.ioff()
loop.close()
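
The VideoGradientTest module is not included in this gist (several commenters below ask where to find it). A minimal stand-in, assuming getWindowedGradientMagnitude simply returns a 2D gradient-magnitude map resized to the requested width and height, could look like the sketch below; this is an assumption, not the author's original module.

# VideoGradientTest.py -- hypothetical stand-in for the missing module.
import cv2
import numpy as np

def getWindowedGradientMagnitude(rgb_image, width, height):
    # Assumed behaviour: return a gradient-magnitude map of the given size.
    gray = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2GRAY)
    gray = cv2.resize(gray, (width, height))
    gx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=3)  # horizontal gradient
    gy = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=3)  # vertical gradient
    return np.sqrt(gx ** 2 + gy ** 2)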
using UnityEngine;
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Collections;
using System.Collections.Generic;
//using HUX.Interaction;
//using HUX.Receivers;
using UnityEngine.UI;
// <JEM>Ignore the Unity editor and run this code on the HoloLens instead</JEM>
#if !UNITY_EDITOR
using System.Threading;
using System.Threading.Tasks;
using Windows.Networking;
using Windows.Networking.Sockets;
using Windows.Networking.Connectivity;
using Windows.Storage.Streams;
#endif
// Able to act as a receiver
public class ImageReciever : MonoBehaviour
{
    public Texture preLoadedTexture;
    public GameObject debugLogText;

    Stream streamIn;
    Renderer rend;
    Texture2D texture;
    int counter = 0;
    byte[] byArray;

    bool logInputSize = false;
    bool socketClosed = false;
    bool writeStringToFile = false;
    bool loadTexture = false;
    bool logSize = false;
    bool recievedData = false;
    bool logRealInput = false;

    string realInput;
    int sizeOfBuffer;
    string error;
    uint imageSize;

#if !UNITY_EDITOR
    StreamSocket socket;
    StreamSocketListener listener;
    String port;
    String message;
#endif
    // Use this for initialization
    void Start()
    {
#if !UNITY_EDITOR
        rend = this.GetComponent<Renderer>();
        listener = new StreamSocketListener();
        port = "8080";
        texture = new Texture2D(160, 120, TextureFormat.RGBA32, false);
        listener.ConnectionReceived += _receiver_socket_ConnectionReceived;
        listener.Control.KeepAlive = false;
        Listener_Start();
#endif
    }
#if !UNITY_EDITOR
    private async void Listener_Start()
    {
        LOG("listninboi");
        Debug.Log("Listener started");
        try
        {
            LOG("Listener started");
            await listener.BindServiceNameAsync(port);
        }
        catch (Exception e)
        {
            Debug.Log("Error: " + e.Message);
        }
    }
    private async void _receiver_socket_ConnectionReceived(StreamSocketListener sender, StreamSocketListenerConnectionReceivedEventArgs args)
    {
        try
        {
            while (true)
            {
                using (var dr = new DataReader(args.Socket.InputStream))
                {
                    dr.InputStreamOptions = InputStreamOptions.Partial;
                    await dr.LoadAsync(60000); // load the buffer

                    // Read the payload size prefix (up to 5 ASCII digits). If the size is
                    // only 4 digits, the fifth character read here is the 'b' of the
                    // sender's b'' wrapper, so strip it.
                    var inputSize = dr.ReadString(5);
                    if (inputSize.EndsWith("b"))
                    {
                        inputSize = inputSize.Substring(0, inputSize.Length - 1);
                    }
                    imageSize = Convert.ToUInt32(inputSize);
                    logInputSize = true;
                    recievedData = true;

                    var input = dr.ReadString(imageSize);
                    // Trim off the b'' wrapper of the base64 string: if the size was
                    // 4 digits, one leading character remains; if 5 digits, two remain.
                    if (inputSize.Length == 4)
                    {
                        input = input.Substring(1, input.Length - 1);
                    }
                    else
                    {
                        input = input.Substring(2, input.Length - 2);
                    }
                    //input = input.Substring(0, (input.Length - 1));

                    // Keep the start and end of the received string for on-device debugging.
                    realInput = input.Substring(0, 10) + "\n" + input.Substring(input.Length - 3);
                    logRealInput = true;

                    // Pad the base64 string to a multiple of 4 before decoding.
                    while (input.Length % 4 != 0)
                    {
                        input += "=";
                    }
                    byte[] byteArray = Convert.FromBase64String(input);
                    byArray = byteArray; // hand the decoded texture bytes to Update()
                    loadTexture = true;
                    //writeToFile(input);
                }
            }
        }
        catch (Exception e)
        {
            error = e.Message;
            socketClosed = true;
        }
    }
    //private async Task readTCPDataAsync(DataReader reader)
    //{
    //    reader.InputStreamOptions = InputStreamOptions.None;
    //    // Read the length of the payload that will be received.
    //    byte[] payloadSize = new byte[(uint)BitConverter.GetBytes(0).Length];
    //    await reader.LoadAsync((uint)payloadSize.Length);
    //    reader.ReadBytes(payloadSize);
    //    // Read the payload.
    //    int size = BitConverter.ToInt32(payloadSize, 0);
    //    sizeOfBuffer = size;
    //    logSize = true;
    //    byte[] payload = new byte[size];
    //    await reader.LoadAsync((uint)size);
    //    reader.ReadBytes(payload);
    //    string data = Encoding.ASCII.GetString(payload);
    //    // Write the data to file to check that an image was actually received.
    //    writeToFile(payload);
    //    writeToFile(data);
    //    // Set byArray to the payload image so it can be applied in the main update routine.
    //    byArray = payload;
    //    loadTexture = true;
    //    writeStringToFile = true;
    //}

    public static byte[] StringToByteArray(string hex)
    {
        return Enumerable.Range(0, hex.Length)
                         .Where(x => x % 2 == 0)
                         .Select(x => Convert.ToByte(hex.Substring(x, 2), 16))
                         .ToArray();
    }

    //private async Task readTCPDataAsync(DataReader reader)
    //{
    //    reader.InputStreamOptions = InputStreamOptions.Partial;
    //    uint numFileBytes = await reader.LoadAsync(reader.UnconsumedBufferLength);
    //    byArray = new byte[numFileBytes];
    //    reader.ReadBytes(byArray);
    //    texture.LoadImage(byArray);
    //}
#endif
    void writeToFile(byte[] bytes)
    {
        string path = Path.Combine(Application.persistentDataPath, "MyFile.txt");
        using (TextWriter writer = File.CreateText(path))
        {
            writer.Write("hey there frend this is working now whoooo!");
            writer.Write(bytes);
        }
    }

    void writeToFile(String str)
    {
        string path = Path.Combine(Application.persistentDataPath, "MyFile.txt");
        using (TextWriter writer = File.CreateText(path))
        {
            writer.Write("Hey there frend");
        }
    }

    void LOG(string msg)
    {
        debugLogText.GetComponent<TextMesh>().text += "\n " + msg;
    }
    void Update()
    {
        if (logRealInput)
        {
            LOG("INPUT IS: " + realInput);
            logRealInput = false;
        }
        if (logInputSize)
        {
            LOG("IMAGESIZE IS: " + imageSize);
            logInputSize = false;
        }
        if (logSize)
        {
            LOG("SIZE IS: " + sizeOfBuffer);
            logSize = false;
        }
        if (socketClosed)
        {
            LOG(error);
            LOG("OOPS SOCKET CLOSED");
            socketClosed = false;
        }
        if (writeStringToFile)
        {
            LOG("WRITTEN TO FILE");
            writeStringToFile = false;
        }
        if (loadTexture)
        {
            LOG("LOADING IMAGE CURRENTLY");
            texture.LoadImage(byArray);
            this.rend.material.mainTexture = preLoadedTexture;
            this.rend.material.mainTexture = texture;
            LOG("LOADED IMAGE");
            loadTexture = false;
        }
        if (recievedData)
        {
            LOG("recieved data!");
            recievedData = false;
        }
    }
}
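
For debugging connection problems like the one described in the comments below, a minimal one-shot sender can be useful. The sketch below is a simplification of the Python script above and uses the same wire framing; the IP address is a placeholder for your HoloLens address, and the port must match the listener's "8080".

# one_shot_sender.py -- minimal test client, a sketch derived from the script above.
import asyncio
import base64

async def send_one(path, host='192.168.1.117', port=8080):
    # host is a placeholder; replace it with your HoloLens IP address.
    reader, writer = await asyncio.open_connection(host, port)
    with open(path, 'rb') as f:
        data = base64.b64encode(f.read())
    # Same framing as the main script: ASCII length prefix, then str(bytes) with its b'' wrapper.
    writer.write(str(len(data)).encode() + str(data).encode())
    await writer.drain()
    writer.write_eof()
    writer.close()

asyncio.run(send_one('foo.jpg'))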
@jryebread (Author)

Can remove line 268.

@Dinaya commented Oct 21, 2019

Hi,
The UWP server starts successfully, but when I run the Python script to connect to it, I get "TimeoutError: [Errno 10060] Connect call failed ('192.168.100.132', 12348)".

Do you have any idea why? Is there any environment setting I need to change? (I have only set the Capability.)

Thanks for reading.

Update: I got it working. You need to disable the SSL connection on the HoloLens, and the sender and receiver must run on two different devices (not the same one).

@atolegen commented Nov 3, 2020

ModuleNotFoundError: No module named 'VideoGradientTest'
Where did you get this module?

@Matteo-0

> ModuleNotFoundError: No module named 'VideoGradientTest'
> Where did you get this module?

Did you find this module? I am not able to find it.

@Matteo-0

Please, can you tell me where to find the VideoGradientTest module? I really need it.

@Matteo-0

Please, @jryebread, can you help me?
