
Coroutines && async examples

# Async Redis
# When Python code talks to Redis, connecting, running commands, and disconnecting are all network I/O.
# Install the aioredis module:
pip install aioredis==1.3.1
# Example: this example uses aioredis==1.3.1; to use the latest version, check the documentation on PyPI.
import asyncio
import aioredis

async def execute(address, passwd):
    print('start', address)
    # create a connection pool; connecting is network I/O, so it is awaited
    redis = await aioredis.create_redis_pool(address, password=passwd)

    # write several hash fields, then read them back (both are network I/O)
    await redis.hmset_dict('car', key1=1, key2=2, key3=3)
    result = await redis.hgetall('car', encoding='utf-8')
    print(result)

    # close the pool and wait until the connections are released
    redis.close()
    await redis.wait_closed()
    print('end', address)

async def main():
    # asyncio.wait() expects tasks (not bare coroutines) on newer Python versions,
    # so create the tasks inside the running event loop
    task_list = [
        asyncio.create_task(execute('redis://192.168.31.18', None)),
        asyncio.create_task(execute('redis://192.168.31.18', None)),
    ]
    await asyncio.wait(task_list)

asyncio.run(main())
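
# The aioredis project has since been folded into redis-py, so on current installs the same
# operations go through the redis.asyncio module. A minimal sketch, assuming redis-py 4.2+
# and the same hypothetical server address as above:
import asyncio
import redis.asyncio as aredis

async def execute(address):
    print('start', address)
    # decode_responses=True returns str instead of bytes
    r = aredis.from_url(address, decode_responses=True)
    await r.hset('car', mapping={'key1': 1, 'key2': 2, 'key3': 3})
    result = await r.hgetall('car')
    print(result)
    await r.aclose()  # on redis-py < 5 use close() instead
    print('end', address)

asyncio.run(execute('redis://192.168.31.18'))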

# Async MySQL
# Dependencies:
pip install aiomysql
# Example:
import asyncio
import aiomysql

async def execute(host, password):
    print('start', host)
    # connecting, executing the query, and fetching rows are all network I/O
    conn = await aiomysql.connect(host=host, port=3306, user='root', password=password, db='mysql')
    cur = await conn.cursor()
    await cur.execute('select * from res_user')
    result = await cur.fetchall()
    print(result)

    await cur.close()
    conn.close()
    print('end', host)

async def main():
    # wrap the coroutines in tasks inside the running event loop before waiting on them
    task_list = [
        asyncio.create_task(execute('127.0.0.1', '123456')),
        asyncio.create_task(execute('127.0.0.1', '123456')),
    ]
    await asyncio.wait(task_list)

asyncio.run(main())
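
# When the same connection details are reused across many queries, aiomysql also offers a
# connection pool, so each coroutine borrows a connection instead of opening its own.
# A minimal sketch, reusing the hypothetical host, credentials, and res_user table from above:
import asyncio
import aiomysql

async def query(pool, sql):
    # acquire a connection from the pool for the duration of the query
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(sql)
            return await cur.fetchall()

async def main():
    pool = await aiomysql.create_pool(host='127.0.0.1', port=3306,
                                      user='root', password='123456',
                                      db='mysql', minsize=1, maxsize=5)
    results = await asyncio.gather(
        query(pool, 'select * from res_user'),
        query(pool, 'select * from res_user'),
    )
    print(results)
    pool.close()
    await pool.wait_closed()

asyncio.run(main())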

# Async crawler
# Dependencies:
pip install aiohttp
import aiohttp
import asyncio

async def fetch(session, url):
    print('sending request:', url)
    # ssl=False disables certificate verification (the older verify_ssl=False spelling is deprecated)
    async with session.get(url, ssl=False) as response:
        text = await response.text()
        print('got result:', text)
        return text

async def main():
    async with aiohttp.ClientSession() as session:
        url_list = [
            'https://python.org',
            'https://www.baidu.com',
            'https://www.pythonav.com'
        ]
        tasks = [asyncio.create_task(fetch(session, url)) for url in url_list]
        done, pending = await asyncio.wait(tasks)

if __name__ == '__main__':
    asyncio.run( main() )
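
# asyncio.wait returns done/pending sets, so the order of results is not tied to url_list.
# If the responses should line up with the URLs, asyncio.gather is the usual alternative;
# a minimal variant of main() using the same fetch coroutine as above:
async def main():
    async with aiohttp.ClientSession() as session:
        url_list = [
            'https://python.org',
            'https://www.baidu.com',
            'https://www.pythonav.com'
        ]
        # gather preserves the order of the awaitables it was given
        results = await asyncio.gather(*(fetch(session, url) for url in url_list))
        for url, text in zip(url_list, results):
            print(url, len(text))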

 
