cdiddy cdiddy - 2 months ago 21x
Python Question

Boto file upload to S3 failing on Windows [errno: 10054]

I'm trying to upload a file to S3 using boto on a Windows 7 machine, but I keep getting an error

[Errno 10054] An existing connection was forcibly closed by the remote host

My code to interact with S3 looks like this

from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Connect to the universal S3 endpoint (this is what later triggers the
# 307 Temporary Redirect seen in the debug output below).
conn = S3Connection(Access_Key_ID, Secret_Key)
bucket = conn.lookup(bucket_name)

k = Key(bucket)
k.key = 'akeynameformyfile'
# This is the call that fails with [Errno 10054] -- it is the frame shown
# in the traceback below and was missing from the snippet as posted.
k.set_contents_from_filename(full_path_of_file_to_upload)

The upload works fine on the same machine using the AWS CLI with the following command

aws s3 cp filename.exe s3://bucketname/ttt

The file is about 200MB

My OS is Windows 7, python is running through anaconda with all packages up to date. Boto is version 2.25

This code all runs fine from a CentOS box on the same network. So is this a Windows issue?

Any help would be much appreciated

Debug Output Below

send: 'HEAD / HTTP/1.1\r\nHost:\r\nAccept-Encoding:
identity\r\nDate: Wed, 14 May 2014 22:44:31 GMT\r\nContent-Length:
0\r\nAuthorization: AWS ACCESS_KEY_ID:SOME_STUFF=\r\nUser-Agent:
Boto/2.25.0 Python/2.7.5 Windows/7\r\n\r\n'

reply: 'HTTP/1.1 307 Temporary Redirect\r\n'

header: x-amz-request-id: 8A3D34FB0E0FD8E4

header: x-amz-id-2:

header: Location:

header: Content-Type: application/xml

header: Transfer-Encoding: chunked

header: Date: Wed, 14 May 2014 22:44:31 GMT

header: Server: AmazonS3

send: 'HEAD / HTTP/1.1\r\nHost:\r\nAccept-Encoding:
identity\r\nDate: Wed, 14 May 2014 22:44:31 GMT\r\nContent-Length:
0\r\nAuthorization: AWS ACCESS_KEY_ID:SOME_STUFF=\r\nUser-Agent:
Boto/2.25.0 Python/2.7.5 Windows/7\r\n\r\n'

reply: 'HTTP/1.1 200 OK\r\n'

header: x-amz-id-2:

header: x-amz-request-id: 2A7BECC45C9BAE7A

header: Date: Wed, 14 May 2014 22:44:33 GMT

header: Content-Type: application/xml

header: Transfer-Encoding: chunked

header: Server: AmazonS3

send: 'PUT /akeynameformyfile HTTP/1.1\r\nHost:\r\nAccept-Encoding:
identity\r\nContent-Length: 242642944\r\nContent-MD5: xYOiNcyFKGY1Y/HsYwHQeg==\r\nExpect: 100-Continue\r\nDate: Wed, 14 May 2014 22:44:33 GMT\r\nUser-Agent:
Boto/2.25.0 Python/2.7.5 Windows/7\r\nContent-Type:
application/octet-stream\r\nAuthorization: AWS

--------------------------------------------------------------------------- error Traceback (most recent call

in ()

12 k = Key(bucket)
13 k.key = 'akeynameformyfile'

---> 14 k.set_contents_from_filename(full_path_of_file_to_upload)

in set_contents_from_filename(self, filename, headers, replace, cb,
num_cb, policy, md5, reduced_redundancy, encrypt_key)

1313 num_cb, policy,
md5, 1314


-> 1315 encrypt_key=encrypt_key)


1317 def set_contents_from_string(self, string_data, headers=None,

in set_contents_from_file(self, fp, headers, replace, cb, num_cb,
policy, md5, reduced_redundancy, query_args, encrypt_key, size,
rewind) 1244

self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb,

1245 query_args=query_args,

-> 1246 chunked_transfer=chunked_transfer, size=size) 1247

return number of bytes written. 1248 return self.size


in send_file(self, fp, headers, cb, num_cb, query_args,
chunked_transfer, size)

723 self._send_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
724 query_args=query_args,

--> 725 chunked_transfer=chunked_transfer, size=size)

727 def _send_file_internal(self, fp, headers=None, cb=None, num_cb=10,

in _send_file_internal(self, fp, headers, cb, num_cb, query_args,
chunked_transfer, size, hash_algs)

912 headers,
913 sender=sender,

--> 914 query_args=query_args

915 )

916 self.handle_version_headers(resp, force=True)

in make_request(self, method, bucket, key, headers, data, query_args,
sender, override_num_retries, retry_handler)

631 data, host, auth_path, sender,
632 override_num_retries=override_num_retries,

--> 633 retry_handler=retry_handler

634 )

in make_request(self, method, path, headers, data, host, auth_path,
sender, override_num_retries, params, retry_handler)

1028 params,
headers, data, host)

1029 return self._mexe(http_request, sender,

-> 1030 retry_handler=retry_handler) 1031

1032 def close(self):


in _mexe(self, request, sender, override_num_retries, retry_handler)

905 if callable(sender):
906 response = sender(connection, request.method, request.path,

--> 907 request.body, request.headers)

908 else:
909 connection.request(request.method, request.path,

in sender(http_conn, method, path, data, headers)

813 http_conn.send('\r\n')
814 else:

--> 815 http_conn.send(chunk)

816 for alg in digesters:

817 digesters[alg].update(chunk)

C:\Users\username\AppData\Local\Continuum\Anaconda\lib\httplib.pyc in
send(self, data)

803 datablock =

804 else:

--> 805 self.sock.sendall(data)


807 def _output(self, s):

C:\Users\username\AppData\Local\Continuum\Anaconda\lib\ssl.pyc in
sendall(self, data, flags)

227 count = 0

228 while (count < amount):

--> 229 v = self.send(data[count:])

230 count += v

231 return amount

C:\Users\username\AppData\Local\Continuum\Anaconda\lib\ssl.pyc in
send(self, data, flags)

196 while True:

197 try:

--> 198 v = self._sslobj.write(data)

199 except SSLError, x:

200 if x.args[0] == SSL_ERROR_WANT_READ:

error: [Errno 10054] An existing connection was forcibly closed by the
remote host


@garnaat made a suggestion in the comments above that solved this for me. Thanks!! For some reason, connecting to the universal endpoint and then trying to upload specifically to the ap-southeast-2 S3 endpoint fails. But if we use the connect_to_region function to initiate the connection and specify the endpoint we want, everything works a-ok! Thanks again, and working example below.

from boto.s3 import connect_to_region
from boto.s3.connection import Location
from boto.s3.key import Key

# Connect directly to the bucket's regional endpoint instead of the
# universal endpoint -- this avoids the 307 Temporary Redirect that
# preceded the errno 10054 connection reset above.
# NOTE: the call as originally pasted was truncated mid-line; the
# credential keyword arguments and closing paren are restored here per
# boto's connect_to_region(region_name, **kw_params) signature.
conn = connect_to_region(Location.APSoutheast2,
                         aws_access_key_id=Access_Key_ID,
                         aws_secret_access_key=Secret_Key)
bucket = conn.lookup(bucket_name)  # bucket is located in Location.APSoutheast2

k = Key(bucket)
k.key = 'akeynameformyfile'
k.set_contents_from_filename(full_path_of_file_to_upload)