#!/usr/bin/perl
use strict;
use warnings;

use DBI;
use File::MMagic;
use CGI;
use File::Slurp;
use Amazon::S3;
use JSON;
use Redis;

# Set the temporary directory here
my $uploaddir = '/tmp/';

# Maximum file size in bytes (0.5 MB)
my $maxFileSize = 0.5 * 1024 * 1024;

# Rate limiting: at most $maxRequestLimit requests per $maxRequestInterval seconds
my $maxRequestInterval = 40;
my $maxRequestLimit    = 3;

my $thumbnailType = "png";

# AWS credentials here
my $aws_access_key_id     = "AKIAIR53VPBXKJMXZIBA";
my $aws_secret_access_key = "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";

# SQL connection settings
my $sql_username = "asdfus";
my $sql_dbname   = "asdfus";
my $sql_passwd   = "gTYgT&M6q";

# Redis connection (localhost by default)
my $redis = Redis->new;

my $api_name = "shaderThumbnail";
my $time_now = time();

my $IN = CGI->new;

sub get_postdata {
    return $IN->param('POSTDATA');
}

# Per-IP rate limiter backed by Redis, keyed on "<ip>_<api_name>".
# Returns 0 if the request is allowed, otherwise the number of
# seconds remaining until the current timeframe resets.
sub check_limits {
    my ($api_name, $tf_length, $limit, $ip, $now) = @_;
    my $new_tf;
    my $value = $redis->get(sprintf("%s_%s", $ip, $api_name));
    if (!$value) {
        # No existing timeframe; start a new one now with one request counted
        $new_tf = sprintf("%d-%d", $now, 1);
    } else {
        my ($tf_start, $req_count) = ($value =~ /(\d+)-(\d+)/);
        my $reset_time = $tf_start + $tf_length;
        if ($reset_time < $now) {
            # Timeframe expired; start a new one
            $new_tf = sprintf("%d-%d", $now, 1);
        } else {
            # Still inside the timeframe; check limits and increase the counter
            $req_count++;
            if ($req_count > $limit) {
                # Over the limit: report seconds left until the timeframe resets
                return int($reset_time - $now);
            }
            $new_tf = sprintf("%d-%d", $tf_start, $req_count);
        }
    }
    my $redis_key = sprintf("%s_%s", $ip, $api_name);
    $redis->set($redis_key, $new_tf);
    # Arbitrary expiry longer than the timeframe so stale keys clean themselves up
    $redis->expire($redis_key, $tf_length * 2);
    return 0;
}

# "qqfile" is the default name for the file parameter when using qq upload;
# change it if necessary.
sub get_filedata {
    my $file = $IN->upload('qqfile');
    return unless $file;
    return read_file($file, binmode => ':raw');
}

sub make_filename {
    return sprintf("%s.%s", $IN->param("id"), $thumbnailType);
}

sub make_thumbnail_url {
    my $filename = shift;
    return sprintf("http://i.asdf.us/shader_thumb/%s", $filename);
}

sub add_to_db {
    my ($thumbnail_url, $shader_id) = @_;
    my $dbh = DBI->connect("DBI:mysql:$sql_dbname", $sql_username, $sql_passwd);
    unless ($dbh) {
        print STDERR "Could not connect to database: $DBI::errstr";
        return undef;
    }
    # Placeholders keep the query safe from SQL injection
    my $execute = $dbh->do(
        "UPDATE shaders SET thumbnail_url = ? WHERE id = ?",
        undef, $thumbnail_url, $shader_id
    );
    return $execute;
}

sub write_to_file {
    my ($filepath, $filedata) = @_;
    write_file($filepath, { binmode => ':raw' }, $filedata) or return undef;
    return 1;
}

sub upload_to_AmazonS3 {
    my ($keyname, $value) = @_;
    my $s3 = Amazon::S3->new(
        {
            aws_access_key_id     => $aws_access_key_id,
            aws_secret_access_key => $aws_secret_access_key,
        }
    );
    my $bucket = $s3->bucket("i.asdf.us");
    $bucket->add_key(
        $keyname, $value,
        { content_type => sprintf("image/%s", $thumbnailType) }
    ) or return undef;
    return $bucket;
}

sub error {
    my $message = shift;
    my $resp = { success => "false", error => $message };
    print JSON->new->encode($resp);
    exit 1;
}

sub main {
    print $IN->header();

    # Reject the request outright if the client is over its rate limit
    my $wait = check_limits($api_name, $maxRequestInterval, $maxRequestLimit,
                            $ENV{REMOTE_ADDR}, $time_now);
    error(sprintf("TOO MANY REQUESTS, RETRY IN %d SECONDS", $wait)) if $wait;

    my $filedata = get_postdata() || get_filedata();
    unless ($filedata) { error("NO DATA RECEIVED") }

    my $filesize = length($filedata);
    if ($filesize < 1) {
        error("EMPTY FILE");
    } elsif ($filesize > $maxFileSize) {
        error("FILE TOO LARGE");
    }

    my $thumbnail_url = make_thumbnail_url(make_filename());
    unless (upload_to_AmazonS3(sprintf("shader_thumb/%s", make_filename()), $filedata)) {
        error("UNABLE TO UPLOAD TO AMAZONS3");
    }
    unless (add_to_db($thumbnail_url, $IN->param('id'))) {
        print STDERR "problem adding value to db";
    }

    my $resp = {
        success  => "true",
        url      => $thumbnail_url,
        filesize => $filesize,
    };
    print JSON->new->encode($resp);
    exit(0);
}

main();
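
# Usage sketch (not part of the original script): CGI.pm exposes a raw,
# non-form-encoded POST body as the POSTDATA parameter, so a client could
# send PNG bytes directly with the shader id in the query string. The URL
# below is a hypothetical deployment path, not one confirmed by this script:
#
#   curl -X POST 'http://example.com/cgi-bin/shader_thumbnail.cgi?id=42' \
#        -H 'Content-Type: image/png' --data-binary @thumb.png
#
# On success the script responds with JSON of the form:
#   {"success":"true","url":"http://i.asdf.us/shader_thumb/42.png","filesize":1234}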