# MapLocNet/osm/download.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
import json
import urllib.request
from pathlib import Path
from typing import Dict, Optional

import requests
import urllib3

from utils.geo import BoundaryBox


def get_osm(
    boundary_box: BoundaryBox,
    cache_path: Optional[Path] = None,
    overwrite: bool = False,
) -> dict:
    """Download the raw OSM JSON for a bounding box, optionally caching it on disk."""
    if not overwrite and cache_path is not None and cache_path.is_file():
        with cache_path.open() as fp:
            return json.load(fp)

    (bottom, left), (top, right) = boundary_box.min_, boundary_box.max_
    content: bytes = get_web_data(
        # "https://api.openstreetmap.org/api/0.6/map.json",
        "https://openstreetmap.erniubot.live/api/0.6/map.json",
        # "https://overpass-api.de/api/map",
        # "http://localhost:29505/api/map",
        # "https://lz4.overpass-api.de/api/interpreter",
        {"bbox": f"{left},{bottom},{right},{top}"},
    )

    content_str = content.decode("utf-8")
    if content_str.startswith("You requested too many nodes"):
        raise ValueError(content_str)

    if cache_path is not None:
        with cache_path.open("wb") as fp:
            fp.write(content)
    return json.loads(content_str)
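
# Note: the dict returned by get_osm() follows the OSM API 0.6 JSON layout:
# top-level metadata ("version", "generator", "bounds", ...) plus an "elements"
# list of node / way / relation dicts, e.g.
#     {"type": "node", "id": ..., "lat": ..., "lon": ..., "tags": {...}}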


def get_web_data(address: str, parameters: Dict[str, str]) -> bytes:
    # logger.info("Getting %s...", address)
    # To route the request through a proxy, pass e.g.
    #     proxies = {"http": proxy_address, "https": proxy_address}
    # to requests.get().
    while True:
        try:
            # Log the full URL that will be requested.
            full_url = requests.Request("GET", address, params=parameters).prepare().url
            print("url:", full_url)
            # Send the GET request; retry on any network error.
            response = requests.get(address, params=parameters, timeout=100)
            break
        except Exception as e:
            print(f"Request failed: {e}")
            print("Retrying...")
    return response.content


# Alternative download helpers kept for reference (require urllib3 / urllib.request):
#
# def get_web_data_2(address: str, parameters: Dict[str, str]) -> bytes:
#     # logger.info("Getting %s...", address)
#     proxy_address = "http://107.173.122.186:3128"
#     http = urllib3.ProxyManager(proxy_address)
#     result = http.request("GET", address, fields=parameters, timeout=100)
#     return result.data
#
#
# def get_web_data_1(address: str, parameters: Dict[str, str]) -> bytes:
#     # Proxy server address and port.
#     proxy_address = "http://107.173.122.186:3128"
#
#     # Create a ProxyHandler for the proxy.
#     proxy_handler = urllib.request.ProxyHandler({"http": proxy_address})
#
#     # Build the query string and the full URL.
#     query_string = urllib.parse.urlencode(parameters)
#     url = address + "?" + query_string
#     print(url)
#
#     # Build an opener that routes requests through the proxy.
#     opener = urllib.request.build_opener(proxy_handler)
#
#     # Send the request through the opener.
#     response = opener.open(url)
#
#     # Plain GET without the proxy:
#     # response = urllib.request.urlopen(url, timeout=100)
#
#     # Read and return the response body.
#     data = response.read()
#     return data
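

if __name__ == "__main__":
    # Minimal usage sketch (hypothetical values): fetch a small tile and cache it.
    # It assumes BoundaryBox can be built from (lat, lon) south-west / north-east
    # corners, matching the .min_ / .max_ unpacking in get_osm(); adapt the
    # constructor call if the actual utils.geo.BoundaryBox API differs.
    bbox = BoundaryBox((48.137, 11.575), (48.139, 11.577))
    data = get_osm(bbox, cache_path=Path("osm_cache.json"))
    print("downloaded", len(data.get("elements", [])), "OSM elements")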