refine and add README

parent d723f35a71
commit 2c91d36498

4 changed files with 119 additions and 39 deletions
README (new file, 1 line)
@@ -0,0 +1 @@
python initial_pano_id.py 53.168051 8.647656 | python walk_pano_id.py 10 | python streetview.py 5
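The README one-liner chains the three scripts in this commit: initial_pano_id.py resolves a latitude/longitude to the nearest panorama's pano_id, walk_pano_id.py expands that id breadth-first over linked panoramas until up to 10 positions are collected, and streetview.py reads the resulting ids from stdin and downloads and stitches their tiles at zoom 5.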
initial_pano_id.py (new file, 28 lines)
@@ -0,0 +1,28 @@
#!/usr/bin/env python

import sys
import urllib2
from xml.etree.ElementTree import ElementTree, fromstring

if len(sys.argv) != 3:
    exit("need to supply lat and lon")

lat = sys.argv[1]
lon = sys.argv[2]

# yaw north: 0 south: 180 east: 90 west: 270
# pitch up: -90 down: 90
# http://cbk0.google.com/cbk?output=xml&panoid=p-DIQUVaFuGHWxVqpLstbA

try:
    xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&ll=%s,%s"%(lat, lon))
except urllib2.HTTPError as e:
    exit("%s %s"%(e.code, e.msg))

doc = fromstring(xml.read())
pano_id = doc.find("data_properties")

if pano_id is None:
    exit("no streetview data available here")

print pano_id.get("pano_id")
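For reference, the lookup above assumes the cbk XML response carries the panorama id as a pano_id attribute on a data_properties element. A minimal, self-contained sketch against a made-up response (not part of the commit):

from xml.etree.ElementTree import fromstring

# hypothetical cbk-style response; the real endpoint returns many more attributes
sample = '<panorama><data_properties pano_id="p-DIQUVaFuGHWxVqpLstbA"/></panorama>'

props = fromstring(sample).find("data_properties")
if props is None:
    exit("no streetview data available here")
print(props.get("pano_id"))   # -> p-DIQUVaFuGHWxVqpLstbA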
streetview.py
@@ -6,30 +6,6 @@ from xml.etree.ElementTree import ElementTree, fromstring
 from PIL import Image
 from StringIO import StringIO
-
-if len(sys.argv) != 4:
-    exit("need to supply lat, lon and zoom (0-5)")
-
-lat = sys.argv[1]
-lon = sys.argv[2]
-zoom = int(sys.argv[3])
-
-# yaw north: 0 south: 180 east: 90 west: 270
-# pitch up: -90 down: 90
-# http://cbk0.google.com/cbk?output=xml&panoid=p-DIQUVaFuGHWxVqpLstbA
-
-try:
-    xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&ll=%s,%s"%(lat, lon))
-except urllib2.HTTPError as e:
-    exit(e.code + " " + e.msg)
-
-doc = fromstring(xml.read())
-pano_id = doc.find("data_properties")
-
-if pano_id is None:
-    exit("no streetview data avaiable here")
-
-pano_id = pano_id.get("pano_id")
 
 gridsizes = [(1, 1),
              (2, 1),
              (4, 2),
@@ -37,21 +13,54 @@ gridsizes = [(1, 1),
              (13, 7),
              (26, 13)]
 
-panorama = Image.new("RGB", (gridsizes[zoom][0]*512, gridsizes[zoom][1]*512))
-
-try:
-    for x in xrange(gridsizes[zoom][0]):
-        for y in xrange(gridsizes[zoom][1]):
-            tile = urllib2.urlopen("http://cbk0.google.com/cbk?output=tile&panoid=%s&zoom=%d&x=%d&y=%d"%(pano_id, zoom, x, y))
-            imtile = Image.open(StringIO(tile.read()))
-            panorama.paste(imtile, (x*512, y*512))
-except urllib2.HTTPError as e:
-    exit(e.code + " " + e.msg)
-
-panorama.save("panorama.jpg")
-
-try:
-    url = urllib2.urlopen("http://cbk0.google.com/cbk?output=thumbnail&w=416&h=208&ll=%s,%s"%(lat, lon))
-    open("thumbnail.jpg", "w").write(url.read())
-except urllib2.HTTPError as e:
-    exit(e.code + " " + e.msg)
+# yaw north: 0 south: 180 east: 90 west: 270
+# pitch up: -90 down: 90
+# http://cbk0.google.com/cbk?output=xml&panoid=p-DIQUVaFuGHWxVqpLstbA
+
+def process(panoid, zoom):
+    print >>sys.stderr, panoid
+    # retrieve tiles and save panorama
+    panorama = Image.new("RGB", (gridsizes[zoom][0]*512, gridsizes[zoom][1]*512))
+    try:
+        for x in xrange(gridsizes[zoom][0]):
+            for y in xrange(gridsizes[zoom][1]):
+                print >>sys.stderr, x, y
+                tile = urllib2.urlopen("http://cbk0.google.com/cbk?output=tile&panoid=%s&zoom=%d&x=%d&y=%d"%(panoid, zoom, x, y))
+                imtile = Image.open(StringIO(tile.read()))
+                panorama.paste(imtile, (x*512, y*512))
+    except urllib2.HTTPError as e:
+        exit("%s %s"%(e.code, e.msg))
+    panorama.save("%s_panorama.jpg"%panoid)
+
+    # divide into 16 images around the center
+    num_imgs = 8
+
+    cwidth = (512*gridsizes[zoom][0])/num_imgs
+    cheight = (cwidth*3)/4 # 4:3 aspect
+    upper = (gridsizes[zoom][1]*512)/2 - cheight/2
+    lower = upper+cheight
+
+    for i in xrange(num_imgs):
+        p = panorama.crop((i*cwidth,upper,(i+1)*cwidth,lower))
+        filename = "%s_img_%003d.jpg"%(panoid,i)
+        p.save(filename)
+
+    # save thumbnail
+    try:
+        url = urllib2.urlopen("http://cbk0.google.com/cbk?output=thumbnail&panoid=%s&w=416&h=208"%panoid)
+        open("%s_thumb.jpg"%panoid, "w").write(url.read())
+    except urllib2.HTTPError as e:
+        exit("%s %s"%(e.code, e.msg))
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        exit("need to supply zoom (0-5)")
+
+    zoom = int(sys.argv[1])
+
+    from_stdin = sys.stdin.readlines()
+    for line in from_stdin:
+        process(line.strip(), zoom)
+
+
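To make the crop geometry in process() concrete, here is a quick check of the numbers at zoom 5; the variable names mirror the script, but the snippet itself is illustration only and not part of the commit:

# crop geometry at zoom 5: gridsizes[5] == (26, 13)
grid_w, grid_h = 26, 13
width, height = grid_w*512, grid_h*512      # stitched panorama: 13312 x 6656 px
num_imgs = 8
cwidth = width//num_imgs                    # 1664 px per crop ("/" in the Python 2 script)
cheight = (cwidth*3)//4                     # 1248 px, 4:3 aspect
upper = height//2 - cheight//2              # 2704
lower = upper + cheight                     # 3952
print((cwidth, cheight, upper, lower))      # (1664, 1248, 2704, 3952)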
walk_pano_id.py (new file, 42 lines)
@@ -0,0 +1,42 @@
#!/usr/bin/env python

import sys
import urllib2
from xml.etree.ElementTree import ElementTree, fromstring

def links_from_panoid(panoid):
    try:
        xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&panoid=%s"%panoid)
    except urllib2.HTTPError as e:
        exit("%s %s"%(e.code, e.msg))
    doc = fromstring(xml.read())
    return [link.get("pano_id") for link in doc.findall("annotation_properties/link")]

if __name__ == "__main__":
    if len(sys.argv) != 2:
        exit("need to supply maximum number of positions")

    from_stdin = sys.stdin.readlines()
    if len(from_stdin) != 1:
        exit("only one initial pano_id expected on stdin")
    init_pano = from_stdin[0].strip()
    max_pos = int(sys.argv[1])

    result = set([init_pano])
    toprocess = [init_pano]

    # breadth first search
    while len(result) < max_pos and toprocess:
        newtoprocess = []
        for panoid in toprocess:
            if len(result) >= max_pos:
                break
            ids = links_from_panoid(panoid)
            for p in ids:
                if p not in result:
                    newtoprocess.append(p)
            result.update(ids)
        toprocess = newtoprocess

    for p in result:
        print p
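The walk is a breadth-first expansion over the link graph that cbk exposes per panorama; note that result can overshoot max_pos by at most one link set, because each panorama's links are added as a batch. A self-contained illustration against a made-up link graph (not part of the commit):

# hypothetical link graph standing in for live links_from_panoid() lookups
links = {"A": ["B", "C"], "B": ["A", "D"], "C": ["A"], "D": ["B", "E"], "E": ["D"]}

max_pos = 4
result = set(["A"])
toprocess = ["A"]
while len(result) < max_pos and toprocess:
    newtoprocess = []
    for panoid in toprocess:
        if len(result) >= max_pos:
            break
        ids = links[panoid]
        for p in ids:
            if p not in result:
                newtoprocess.append(p)
        result.update(ids)
    toprocess = newtoprocess

print(sorted(result))   # ['A', 'B', 'C', 'D']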