Compare commits
5 Commits
Author | SHA1 | Date |
---|---|---|
|
2e421ed9be | 9 years ago |
|
6dc5e6d0c2 | 9 years ago |
|
bb95df8d25 | 9 years ago |
|
d41ed4103f | 9 years ago |
|
7bce092ad5 | 9 years ago |
5 changed files with 146 additions and 0 deletions
@ -1 +1,3 @@
*.pyc
venv
config.yaml
@ -0,0 +1,5 @@
mysql:
  hostname: localhost
  username: root
  password: ""
  database: tahoes3
@ -0,0 +1,39 @@ |
|||
# Third-party dependencies: lxml (XML document construction) and tornado
# (HTTP server, IO loop, command-line option parsing).
import lxml

import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web

from tornado.options import define, options

# The listen port is configurable on the command line: --port=NNNN.
define("port", default=8888, help="run on the given port", type=int)
|||
|
|||
class ServiceHandler(tornado.web.RequestHandler):
    """Answers the S3 "GET Service" call with a ListAllMyBucketsResult document.

    The response follows the Amazon S3 2006-03-01 XML schema, extended with
    elements from the project's tahoe-s3 namespace.
    """

    def get(self):
        # Local imports: the module level only does "import lxml", which does
        # not expose ElementMaker or etree -- the original referenced the
        # undefined names ElementMaker and NSMAP here and raised NameError.
        from lxml.builder import ElementMaker
        from lxml import etree

        # Default namespace is S3's document namespace; "tahoe" carries the
        # project-specific capability extensions.
        nsmap = {
            None: "http://doc.s3.amazonaws.com/2006-03-01",
            "tahoe": "http://doc.cryto.net/xml/tahoe-s3",
        }
        E = ElementMaker(namespace="http://doc.s3.amazonaws.com/2006-03-01", nsmap=nsmap)

        # TODO: populate from the storage backend; nothing is listed yet.
        buckets = []

        doc = E.ListAllMyBucketsResult(
            E.Owner(
                # NOTE(review): hard-coded owner values lifted from the S3
                # documentation examples -- replace with real account data.
                E.Id("bcaf1ffd86f461ca5fb16fd081034f"),
                E.DisplayName("webfile")
            ),
            E.Buckets(
                *buckets
            )
        )

        # Bug fix: the original built the document but never sent a response.
        self.set_header("Content-Type", "application/xml")
        self.write(etree.tostring(doc))
|||
|
|||
if __name__ == "__main__":
    # Route table: a single service-level endpoint.
    handler_routes = [(r"/", ServiceHandler)]

    # Parse --port (and any other defined options) before building the app.
    tornado.options.parse_command_line()

    application = tornado.web.Application(handlers=handler_routes)
    server = tornado.httpserver.HTTPServer(application)
    server.listen(options.port)

    # Blocks forever, serving requests on the configured port.
    tornado.ioloop.IOLoop.instance().start()
@ -0,0 +1,67 @@ |
|||
# hmac/hashlib/base64 implement the AWS signing scheme (base64-encoded
# HMAC-SHA1); urllib percent-encodes subresource values.
import hmac, hashlib, base64, urllib

# Example AWS credentials taken from the official S3 documentation --
# well-known placeholders, not real secrets.
pubkey = "AKIAIOSFODNN7EXAMPLE"
privkey = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
|||
|
|||
def sign(key, message):
    """Return the raw HMAC-SHA1 digest of *message* under *key*.

    The message is UTF-8 encoded before hashing; *key* is passed through
    unchanged (under Python 3 it must already be a byte string).
    """
    mac = hmac.new(key, message.encode("utf-8"), hashlib.sha1)
    return mac.digest()
|||
|
|||
class S3SignatureGenerator(object):
    """Builds AWS S3 request signatures (signature version 2).

    Usage: call set_keys() once, then generate() per request to obtain the
    base64-encoded HMAC-SHA1 signature of the request's string-to-sign.
    """

    def set_keys(self, access_key, secret_key):
        """Store the AWS access key id and secret key used for signing."""
        self.access_key = access_key
        self.secret_key = secret_key

    def generate(self, verb, bucket, path, headers=None, content_md5="", content_type=""):
        """Return the base64-encoded signature for the described request.

        verb         -- HTTP method ("GET", "PUT", ...)
        bucket       -- bucket name, or None for a service-level request
        path         -- object path within the bucket (leading slash expected)
        headers      -- optional iterable of (name, value) pairs; only
                        x-amz-* headers participate in the signature
        content_md5  -- value of the Content-MD5 header, if any
        content_type -- value of the Content-Type header, if any
        """
        # Bug fix: the original used a shared mutable default (headers=[]).
        if headers is None:
            headers = []

        # TODO(review): the Date header is part of the string-to-sign; an
        # empty value is only valid when an x-amz-date header is supplied.
        date = ""

        elements = [
            verb,
            content_md5,
            content_type,
            date
        ]

        if len(headers) > 0:
            elements.append(self.canonicalize_headers(headers))

        elements.append(self.canonicalize_resource(bucket, path))

        # Bug fix: the original called "\n".join() with no argument (a
        # TypeError) and never returned anything.
        sts = "\n".join(elements)
        return self.sign(sts)

    def canonicalize_resource(self, bucket=None, path="", subresources=None):
        """Return the CanonicalizedResource string for the request.

        subresources maps subresource name -> value (None for valueless
        subresources such as "torrent"); only names AWS defines as signable
        are included in the result.
        """
        # TODO: Multi-object DELETE parameter?
        # Bug fix: the original used a shared mutable default (subresources={}).
        if subresources is None:
            subresources = {}

        if bucket is None:
            # NOTE(review): this yields "/<path>/" for service-level requests,
            # which looks odd -- confirm against the S3 signing spec.
            result = "/%s/" % path
        else:
            result = "/%s%s" % (bucket, path)

        if len(subresources) > 0:
            subresource_strings = []

            for subresource, value in sorted(subresources.items()):
                if subresource in ("acl", "lifecycle", "location", "logging", "notification", "partNumber", "policy", "requestPayment", "torrent", "uploadId", "uploads", "versionId", "versioning", "versions", "website"):
                    if value is None:
                        subresource_strings.append(subresource)
                    else:
                        # Not sure if this is a correct implementation of the encoding...
                        if subresource not in ("response-content-type", "response-content-language", "response-expires", "response-cache-control", "response-content-disposition", "response-content-encoding"):
                            value = urllib.quote(str(value))

                        subresource_strings.append("%s=%s" % (subresource, value))

            if len(subresource_strings) > 0:  # Don't add this if there were no qualifying subresource specifiers
                result += "?%s" % "&".join(subresource_strings)

        return result

    def canonicalize_headers(self, headers):
        """Return the CanonicalizedAmzHeaders string for *headers*.

        Bug fix: the original stub took no argument, while generate() passed
        one (a TypeError), and returned None. Assumes *headers* is an
        iterable of (name, value) pairs -- TODO confirm against callers.
        Only x-amz-* headers are included, lowercased and sorted; repeated
        headers are comma-joined, per the AWS signing rules.
        """
        collected = {}
        for name, value in headers:
            name = name.lower().strip()
            if name.startswith("x-amz-"):
                collected.setdefault(name, []).append(str(value).strip())

        lines = ["%s:%s" % (name, ",".join(values)) for name, values in sorted(collected.items())]
        return "\n".join(lines)

    def sign(self, sts):
        """Return the base64-encoded HMAC-SHA1 of *sts* under the secret key."""
        key = self.secret_key
        # Robustness: hmac requires a bytes key on Python 3; on Python 2,
        # str is bytes, so this is a no-op for existing callers.
        if not isinstance(key, bytes):
            key = key.encode("utf-8")
        return base64.b64encode(hmac.new(key, sts.encode("utf-8"), hashlib.sha1).digest())
|||
|
|||
# Manual smoke test (Python 2 print syntax). "abc-id" does not appear to be
# a signable subresource, so presumably only "torrent" ends up in the
# output -- TODO confirm.
s = S3SignatureGenerator()
print s.canonicalize_resource("testbucket", "/test/path", subresources={"torrent": None, "abc-id": 4})

#print "AWS %s:%s" % (pubkey, base64.b64encode(sign(privkey, sts)))
@ -0,0 +1,33 @@ |
|||
from lxml.builder import ElementMaker |
|||
from lxml import etree |
|||
|
|||
NSMAP = {None: "http://doc.s3.amazonaws.com/2006-03-01", "tahoe": "http://doc.cryto.net/xml/tahoe-s3"} |
|||
|
|||
E = ElementMaker(namespace="http://doc.s3.amazonaws.com/2006-03-01", nsmap=NSMAP) |
|||
Et = ElementMaker(namespace="http://doc.cryto.net/xml/tahoe-s3", nsmap=NSMAP) |
|||
|
|||
buckets = ( |
|||
E.Bucket( |
|||
E.Name("quotes"), |
|||
E.CreationDate("2006-02-03T16:45:09.000Z"), |
|||
Et.ReadCapability("ABC23423346"), |
|||
Et.WriteCapability("H98NG3040S"), |
|||
Et.VerifyCapability("3148J15990JE"), |
|||
), |
|||
E.Bucket( |
|||
E.Name("samples"), |
|||
E.CreationDate("2006-02-03T16:41:58.000Z") |
|||
) |
|||
) |
|||
|
|||
doc = E.ListAllMyBucketsResult( |
|||
E.Owner( |
|||
E.Id("bcaf1ffd86f461ca5fb16fd081034f"), |
|||
E.DisplayName("webfile") |
|||
), |
|||
E.Buckets( |
|||
*buckets |
|||
) |
|||
) |
|||
|
|||
print etree.tostring(doc, pretty_print=True) |
Loading…
Reference in new issue