Python - how do I make the client able to connect multiple times?

python sockets linux ubuntu python-2.7

When I use client1 = HTTPClient('192.168.1.2', '3') on its own it works, but when I use both of the following:

client1 = HTTPClient('192.168.1.2', '3')
client2 = HTTPClient('192.168.1.3', '3')

then the whole thing becomes very slow and sometimes one of them fails. How can I make sure that client1 and client2 connect and send quickly enough?

import asyncore, socket

class HTTPClient(asyncore.dispatcher):

  def __init__(self, host, path):
    asyncore.dispatcher.__init__(self)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    self.settimeout(10)
    try:
      self.connect( (host, 8888) )
    except:
      print 'unable to connect'
      pass
    self.buffer = path

  def handle_connect(self):
    pass

  def handle_close(self):
    self.close()

  def handle_read(self):
    print self.recv(8192)

  def writable(self):
    return (len(self.buffer) > 0)

  def handle_write(self):
    sent = self.send(self.buffer)
    self.buffer = self.buffer[sent:]


client1 = HTTPClient('192.168.1.2', '3')
client2 = HTTPClient('192.168.1.3', '3')
asyncore.loop()

Edit: I also tried threading, but with the same result:

import asyncore, socket
import threading
import os

class HTTPClient(asyncore.dispatcher):

  def __init__(self, host, path):
    asyncore.dispatcher.__init__(self)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    self.settimeout(10)
    try:
      self.connect( (host, 8888) )
    except:
      print 'unable to connect'
      pass
    self.buffer = path

  def handle_connect(self):
    pass

  def handle_close(self):
    self.close()

  def handle_read(self):
    print self.recv(8192)

  def writable(self):
    return (len(self.buffer) > 0)

  def handle_write(self):
    sent = self.send(self.buffer)
    self.buffer = self.buffer[sent:]

def t1():
  client1 = HTTPClient('192.168.1.161', '3')  

def t2():
  client2 = HTTPClient('192.168.1.163', '3')

t0 = threading.Thread(target=t1())
t0.start()
t0.join() 

t0 = threading.Thread(target=t2())
t0.start()
t0.join() 

asyncore.loop()

jfs

There may well be more than one problem in your code. For example, drop the parentheses when specifying the target: Thread(target=t1). If f is a function, then f() calls it immediately, so the Thread receives its return value instead of the function itself. You are also mixing asyncore with blocking code and multiple threads.
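To illustrate the target mistake, here is a minimal, self-contained sketch (the work function is a made-up stand-in, not something from your code):

import threading

def work():
    # Runs in whichever thread actually calls it.
    print 'running in', threading.current_thread().name

# Wrong: work() is executed right here, in the main thread, and its
# return value (None) is what ends up being passed as target=.
broken = threading.Thread(target=work())

# Right: pass the function object itself; it runs in the new thread.
t = threading.Thread(target=work)
t.start()
t.join()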

If you want to make several HTTP connections at the same time, you could use a thread pool instead:

import urllib2
from multiprocessing.dummy import Pool # use threads

def fetch(url):
    try:
        return url, urllib2.urlopen(url).read(), None
    except Exception as e:
        return url, None, str(e)

urls = ['http://example.com', ...]
pool = Pool(20) # use no more than 20 concurrent connections
for url, result, error in pool.imap_unordered(fetch, urls):
    if error is None:
        print(url + " done")
    else:
        print(url + " error: " + error)
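pool.imap_unordered yields each (url, result, error) tuple as soon as that request finishes, so fast responses are reported without waiting for the slow ones, and multiprocessing.dummy.Pool uses threads rather than processes, which is what you want for I/O-bound work like this. If the goal is just the two hosts from the question, the urls list might look something like the line below; the port and path are guesses based on the connect((host, 8888)) call and the '3' buffer, so adjust them to whatever your server actually expects:

urls = ['http://192.168.1.2:8888/3', 'http://192.168.1.3:8888/3']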