//
// s3Threaded.h
// AWS
//
// Created by Natxo Pedreira gonzalez on 31/5/18.
//
//
#ifndef AWS_s3Threaded_h
#define AWS_s3Threaded_h
#include "ofMain.h"
#include <aws/core/Aws.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/PutObjectRequest.h>
#include <aws/s3/model/ListObjectsRequest.h>
#include <aws/s3/model/GetObjectRequest.h>
#include <aws/s3/model/DeleteObjectRequest.h>
#include <aws/s3/model/Object.h>
#include <aws/s3/model/CreateBucketRequest.h>
#include <iostream>
#include <fstream>
// Descriptor for a single upload job: the local file path, the target
// bucket and the key (name) the object will have once uploaded.
class as3Job {
public:
    Aws::String file = "";
    Aws::String bucket = "";
    Aws::String nombreSubido = "";
};
class s3Threaded: public ofThread {
public:
    s3Threaded()
    :newJob(true){
        // start the thread as soon as the class is created;
        // it won't use any CPU until we send it a job to upload
        startThread();
    }
    ~s3Threaded(){
        // when the class is destroyed,
        // close both channels and wait for
        // the thread to finish
        toDo.close();
        done.close();
        waitForThread(true);
    }
    void toUpload(as3Job & job){
        // queue the job for the uploader thread;
        // this makes a copy of the job descriptor, which is cheap
        toDo.send(job);
    }
    void update(){
        // check whether any uploads have finished; the while loop drains
        // every completed job in case the main thread runs slower than
        // the uploads. tryReceive doesn't block or make extra copies.
        newJob = false;
        while(done.tryReceive(trabajo)){
            newJob = true;
        }
        if(newJob){
            // trabajo now holds the last completed upload;
            // handle it here (notify, log, update the UI, ...)
        }
    }
    as3Job trabajo;
    ofThreadChannel<as3Job> toDo;
    ofThreadChannel<as3Job> done;
    bool newJob;

private:
    void threadedFunction(){
        as3Job trabajo;
        while (toDo.receive(trabajo)) {
            // do the work: upload the file described by the job
            // uploadFile(bucket_name, key_name, file_name, region)
            uploadFile(trabajo.bucket, trabajo.nombreSubido, trabajo.file, "eu-west-2");
#if __cplusplus>=201103
            // move the job back to the main thread to avoid a copy
            done.send(std::move(trabajo));
#else
            done.send(trabajo);
#endif
        }
    }
    void uploadFile(Aws::String bucket_name, Aws::String key_name, Aws::String file_name, Aws::String region){
        Aws::SDKOptions options;
        Aws::InitAPI(options);
        {
            std::cout << "Uploading " << file_name << " to S3 bucket " <<
                bucket_name << " at key " << key_name << std::endl;

            Aws::Client::ClientConfiguration clientConfig;
            if (!region.empty())
                clientConfig.region = region;

            Aws::S3::S3Client s3_client(clientConfig);

            Aws::S3::Model::PutObjectRequest object_request;
            object_request.WithBucket(bucket_name).WithKey(key_name);

            // stream the file from disk as the object body
            auto input_data = Aws::MakeShared<Aws::FStream>("PutObjectInputStream",
                file_name.c_str(), std::ios_base::in | std::ios_base::binary);
            object_request.SetBody(input_data);

            auto put_object_outcome = s3_client.PutObject(object_request);

            if (put_object_outcome.IsSuccess())
            {
                std::cout << "Done!" << std::endl;
                /*
                static awsEvents newEvent;
                newEvent.file = file_name;
                ofNotifyEvent(awsEvents::uploadCorrect, newEvent);
                */
            }
            else
            {
                std::cout << "PutObject error: " <<
                    put_object_outcome.GetError().GetExceptionName() << " " <<
                    put_object_outcome.GetError().GetMessage() << std::endl;
                /*
                stringstream ss;
                ss << put_object_outcome.GetError().GetExceptionName() << " " <<
                    put_object_outcome.GetError().GetMessage() << endl;
                static awsEvents newEvent;
                newEvent.error = ss.str();
                ofNotifyEvent(awsEvents::uploadFailed, newEvent);
                */
            }
        }
        // InitAPI/ShutdownAPI must be paired; shut the SDK down again
        Aws::ShutdownAPI(options);
    }
};
#endif
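For context, a minimal usage sketch from an openFrameworks app (the ofApp class, the s3uploader member and the bucket/key names below are assumptions for illustration, not part of the gist): create one s3Threaded instance, queue an as3Job per file with toUpload(), and call update() every frame to find out when an upload has finished.

// Hypothetical ofApp using s3Threaded (illustrative sketch, not from the original gist).
#include "ofMain.h"
#include "s3Threaded.h"

class ofApp : public ofBaseApp {
public:
    s3Threaded s3uploader; // assumed member name

    void keyPressed(int key){
        if(key == 'u'){
            as3Job job;
            job.file         = ofToDataPath("capture.png", true).c_str(); // assumed local file
            job.bucket       = "my-example-bucket";                       // assumed bucket name
            job.nombreSubido = "uploads/capture.png";                     // key the object gets in S3
            s3uploader.toUpload(job);
        }
    }

    void update(){
        s3uploader.update();
        if(s3uploader.newJob){
            ofLogNotice("s3") << "uploaded: " << s3uploader.trabajo.file;
        }
    }
};

Note that uploadFile() calls Aws::InitAPI()/Aws::ShutdownAPI() for every upload; the SDK expects these calls to be paired, and initializing the SDK once for the lifetime of the app would avoid the repeated start-up cost.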