refine and add README

main
josch 10 years ago
parent d723f35a71
commit 2c91d36498

README
@@ -0,0 +1 @@
python initial_pano_id.py 53.168051 8.647656 | python walk_pano_id.py 10 | python streetview.py 5
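The pipeline reads left to right: initial_pano_id.py turns a lat/lon pair into the pano_id of the nearest panorama, walk_pano_id.py walks the panorama link graph breadth-first to gather up to 10 positions, and streetview.py downloads and stitches each of them at zoom level 5. The same wiring can be driven from Python; a minimal sketch, assuming the three scripts sit in the current directory and keep the stdin/stdout contract shown above:

import subprocess

# spawn the three stages and connect them the same way the shell pipeline does
initial = subprocess.Popen(["python", "initial_pano_id.py", "53.168051", "8.647656"],
                           stdout=subprocess.PIPE)
walk = subprocess.Popen(["python", "walk_pano_id.py", "10"],
                        stdin=initial.stdout, stdout=subprocess.PIPE)
fetch = subprocess.Popen(["python", "streetview.py", "5"],
                         stdin=walk.stdout)
initial.stdout.close()  # let a SIGPIPE propagate if a later stage exits early
walk.stdout.close()
fetch.wait()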

initial_pano_id.py
@@ -0,0 +1,28 @@
#!/usr/bin/env python
import sys
import urllib2
from xml.etree.ElementTree import ElementTree, fromstring

if len(sys.argv) != 3:
    exit("need to supply lat and lon")

lat = sys.argv[1]
lon = sys.argv[2]

# yaw north: 0 south: 180 east: 90 west: 270
# pitch up: -90 down: 90
# http://cbk0.google.com/cbk?output=xml&panoid=p-DIQUVaFuGHWxVqpLstbA
try:
    xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&ll=%s,%s"%(lat, lon))
except urllib2.HTTPError as e:
    exit("%s %s" % (e.code, e.msg))

doc = fromstring(xml.read())
pano_id = doc.find("data_properties")
if pano_id is None:
    exit("no streetview data available here")
print pano_id.get("pano_id")
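Both this script and walk_pano_id.py below read only a handful of elements from the cbk XML: the pano_id attribute of data_properties, and the link elements under annotation_properties. A stripped-down, purely illustrative response (the real one carries many more attributes) parses like this:

from xml.etree.ElementTree import fromstring

# illustrative XML containing only the elements the two scripts look up
sample = """<panorama>
  <data_properties pano_id="p-DIQUVaFuGHWxVqpLstbA"/>
  <annotation_properties>
    <link pano_id="AAAAAAAAAAAAAAAAAAAAAA"/>
    <link pano_id="BBBBBBBBBBBBBBBBBBBBBB"/>
  </annotation_properties>
</panorama>"""

doc = fromstring(sample)
print doc.find("data_properties").get("pano_id")
print [l.get("pano_id") for l in doc.findall("annotation_properties/link")]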

streetview.py
@@ -6,52 +6,61 @@ from xml.etree.ElementTree import ElementTree, fromstring
 from PIL import Image
 from StringIO import StringIO
 
-if len(sys.argv) != 4:
-    exit("need to supply lat, lon and zoom (0-5)")
-
-lat = sys.argv[1]
-lon = sys.argv[2]
-zoom = int(sys.argv[3])
+gridsizes = [(1, 1),
+             (2, 1),
+             (4, 2),
+             (6, 3),
+             (13, 7),
+             (26, 13)]
 
 # yaw north: 0 south: 180 east: 90 west: 270
 # pitch up: -90 down: 90
 # http://cbk0.google.com/cbk?output=xml&panoid=p-DIQUVaFuGHWxVqpLstbA
-try:
-    xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&ll=%s,%s"%(lat, lon))
-except urllib2.HTTPError as e:
-    exit("%s %s" % (e.code, e.msg))
-
-doc = fromstring(xml.read())
-pano_id = doc.find("data_properties")
-if pano_id is None:
-    exit("no streetview data available here")
-
-pano_id = pano_id.get("pano_id")
-
-gridsizes = [(1, 1),
-             (2, 1),
-             (4, 2),
-             (6, 3),
-             (13, 7),
-             (26, 13)]
-
-panorama = Image.new("RGB", (gridsizes[zoom][0]*512, gridsizes[zoom][1]*512))
-
-try:
-    for x in xrange(gridsizes[zoom][0]):
-        for y in xrange(gridsizes[zoom][1]):
-            tile = urllib2.urlopen("http://cbk0.google.com/cbk?output=tile&panoid=%s&zoom=%d&x=%d&y=%d"%(pano_id, zoom, x, y))
-            imtile = Image.open(StringIO(tile.read()))
-            panorama.paste(imtile, (x*512, y*512))
-except urllib2.HTTPError as e:
-    exit("%s %s" % (e.code, e.msg))
-
-panorama.save("panorama.jpg")
-
-try:
-    url = urllib2.urlopen("http://cbk0.google.com/cbk?output=thumbnail&w=416&h=208&ll=%s,%s"%(lat, lon))
-    open("thumbnail.jpg", "w").write(url.read())
-except urllib2.HTTPError as e:
-    exit("%s %s" % (e.code, e.msg))
+def process(panoid, zoom):
+    print >>sys.stderr, panoid
+    # retrieve tiles and save panorama
+    panorama = Image.new("RGB", (gridsizes[zoom][0]*512, gridsizes[zoom][1]*512))
+    try:
+        for x in xrange(gridsizes[zoom][0]):
+            for y in xrange(gridsizes[zoom][1]):
+                print >>sys.stderr, x, y
+                tile = urllib2.urlopen("http://cbk0.google.com/cbk?output=tile&panoid=%s&zoom=%d&x=%d&y=%d"%(panoid, zoom, x, y))
+                imtile = Image.open(StringIO(tile.read()))
+                panorama.paste(imtile, (x*512, y*512))
+    except urllib2.HTTPError as e:
+        exit("%s %s" % (e.code, e.msg))
+    panorama.save("%s_panorama.jpg"%panoid)
+
+    # divide into 8 images around the center
+    num_imgs = 8
+    cwidth = (512*gridsizes[zoom][0])/num_imgs
+    cheight = (cwidth*3)/4 # 4:3 aspect
+    upper = (gridsizes[zoom][1]*512)/2 - cheight/2
+    lower = upper + cheight
+    for i in xrange(num_imgs):
+        p = panorama.crop((i*cwidth, upper, (i+1)*cwidth, lower))
+        filename = "%s_img_%03d.jpg"%(panoid, i)
+        p.save(filename)
+
+    # save thumbnail
+    try:
+        url = urllib2.urlopen("http://cbk0.google.com/cbk?output=thumbnail&panoid=%s&w=416&h=208"%panoid)
+        open("%s_thumb.jpg"%panoid, "w").write(url.read())
+    except urllib2.HTTPError as e:
+        exit("%s %s" % (e.code, e.msg))
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        exit("need to supply zoom (0-5)")
+    zoom = int(sys.argv[1])
+    from_stdin = sys.stdin.readlines()
+    for line in from_stdin:
+        process(line.strip(), zoom)
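The crop arithmetic in process() is easiest to check with concrete numbers. At zoom 5 the tile grid is 26x13 tiles of 512x512 pixels; plugging that into the same formulas shows what ends up on disk per panorama: one 13312x6656 panorama, eight 1664x1248 crops taken from a horizontal band around the vertical center, plus the 416x208 thumbnail.

# the same geometry as process(), spelled out for zoom level 5
gridsizes = [(1, 1), (2, 1), (4, 2), (6, 3), (13, 7), (26, 13)]
zoom = 5
num_imgs = 8
width = gridsizes[zoom][0] * 512    # 13312 px panorama width
height = gridsizes[zoom][1] * 512   # 6656 px panorama height
cwidth = width / num_imgs           # 1664 px per cropped image
cheight = (cwidth * 3) / 4          # 1248 px, 4:3 aspect ratio
upper = height / 2 - cheight / 2    # 2704 px from the top of the panorama
lower = upper + cheight             # 3952 px
print width, height, cwidth, cheight, upper, lower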

walk_pano_id.py
@@ -0,0 +1,42 @@
#!/usr/bin/env python
import sys
import urllib2
from xml.etree.ElementTree import ElementTree, fromstring

def links_from_panoid(panoid):
    try:
        xml = urllib2.urlopen("http://cbk0.google.com/cbk?output=xml&panoid=%s"%panoid)
    except urllib2.HTTPError as e:
        exit("%s %s" % (e.code, e.msg))
    doc = fromstring(xml.read())
    return [link.get("pano_id") for link in doc.findall("annotation_properties/link")]

if __name__ == "__main__":
    if len(sys.argv) != 2:
        exit("need to supply maximum number of positions")
    from_stdin = sys.stdin.readlines()
    if len(from_stdin) != 1:
        exit("only one initial pano_id expected on stdin")
    init_pano = from_stdin[0].strip()
    max_pos = int(sys.argv[1])
    result = set([init_pano])
    toprocess = [init_pano]
    # breadth first search over the linked panoramas
    while len(result) < max_pos and toprocess:
        newtoprocess = []
        for panoid in toprocess:
            if len(result) >= max_pos:
                break
            ids = links_from_panoid(panoid)
            for p in ids:
                if p not in result:
                    newtoprocess.append(p)
            result.update(ids)
        toprocess = newtoprocess
    for p in result:
        print p
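The breadth-first walk needs no network access to be exercised. Swapping links_from_panoid for a stub over a toy adjacency map (hypothetical ids, purely for illustration) shows how the frontier grows level by level until roughly max_pos ids have been collected:

# toy graph standing in for the cbk link structure
graph = {"A": ["B", "C"], "B": ["A", "D"], "C": ["A", "E"],
         "D": ["B"], "E": ["C"]}

def links_from_panoid(panoid):
    return graph.get(panoid, [])

max_pos = 4
result = set(["A"])
toprocess = ["A"]
while len(result) < max_pos and toprocess:
    newtoprocess = []
    for panoid in toprocess:
        if len(result) >= max_pos:
            break
        ids = links_from_panoid(panoid)
        for p in ids:
            if p not in result:
                newtoprocess.append(p)
        result.update(ids)
    toprocess = newtoprocess
print sorted(result)  # ['A', 'B', 'C', 'D']: the start plus the first ids reached breadth-first

Note that result.update adds a whole neighbour list at once, so the final set can slightly exceed max_pos.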