Last active
June 10, 2020 00:10
-
-
Save symisc/6522ea8f5c577bc1136029a679ab04c3 to your computer and use it in GitHub Desktop.
Blur an image according to its NSFW score (https://pixlab.io/cmd?id=nsfw, https://dev.to/unqlite_db/filter-image-uploads-according-to-their-nsfw-score-15be)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<?php
/*
 * PixLab PHP client — a single dependency-free class file available on Github:
 * https://github.com/symisc/pixlab-php
 */
require_once "pixlab.php";

# Image to analyze. Swap in any link (possibly adult) you want, or switch to
# POST if you prefer uploading the image directly; refer to the sample set
# for more information.
$target = 'https://i.redd.it/oetdn9wc13by.jpg';
# Your PixLab API key.
$apiKey = 'My_Pixlab_Key';

# Censor the image according to its NSFW score.
$client = new Pixlab($apiKey);

# First pass: ask the nsfw endpoint to score the image.
if (!$client->get('nsfw', ['img' => $target])) {
    echo $client->get_error_message();
    die;
}

# Pull the NSFW score out of the JSON reply.
$nsfwScore = $client->json->score;
if ($nsfwScore < 0.5) {
    echo "No adult content were detected on this picture\n";
} else {
    echo "Censuring NSFW picture...\n";
    # Second pass: blur with the highest possible radius and sigma.
    if (!$client->get('blur', ['img' => $target, 'rad' => 50, 'sig' => 30])) {
        echo $client->get_error_message();
    } else {
        echo "Blurred Picture URL: " . $client->json->link . "\n";
    }
}
?>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests
import json

# Image to analyze. Swap in any link (possibly adult) you want, or switch to
# POST if you prefer uploading the image directly; refer to the sample set
# for more information.
TARGET_IMG = 'https://i.redd.it/oetdn9wc13by.jpg'
# Your PixLab API key.
API_KEY = 'Pixlab_Key'


def _pixlab(endpoint, **params):
    """Call a PixLab REST endpoint with the shared key; return the JSON reply."""
    params['key'] = API_KEY
    return requests.get('https://api.pixlab.io/' + endpoint, params=params).json()


# Censor the image according to its NSFW score.
reply = _pixlab('nsfw', img=TARGET_IMG)
if reply['status'] != 200:
    print(reply['error'])
elif reply['score'] < 0.5:
    print("No adult content were detected on this picture")
else:
    # Highly NSFW picture: blur with the highest possible radius and sigma.
    print("Censuring NSFW picture...")
    reply = _pixlab('blur', img=TARGET_IMG, rad=50, sig=30)
    if reply['status'] != 200:
        print(reply['error'])
    else:
        print("Blurred Picture URL: " + reply['link'])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Detect not-suitable-for-work (i.e., nudity and adult) content in a given image or video frame. The NSFW endpoint is of particular interest when combined with media-processing API endpoints such as blur, encrypt, or mogrify, letting you censor images on the fly according to their NSFW score. This helps developers automate tasks such as filtering users' uploads.
For additional information, please refer to the official documentation at https://pixlab.io/cmd?id=nsfw