custom bison fute getter

This commit is contained in:
Tykayn 2025-09-17 23:52:49 +02:00 committed by tykayn
parent 6077e84f08
commit 339147e762
8 changed files with 238 additions and 1 deletions

7
.gitignore vendored
View file

@@ -3,4 +3,9 @@ __pycache__/
*.py[cod]
*$py.class
.DS_Store
.env
venv
datasources
extractors/**/*.zip
extractors/**/*.7z
extractors/**/*.json

3
.gitmodules vendored Normal file
View file

@@ -0,0 +1,3 @@
[submodule "datasources"]
path = datasources
url = https://github.com/openeventdatabase/datasources.git

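After cloning, the new submodule can be fetched with the standard git command:

git submodule update --init datasources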
1
datasources Submodule

@@ -0,0 +1 @@
Subproject commit 4f1410e69cbee6c1c4f8864240379678ad6162cd

View file

@@ -0,0 +1,63 @@
import requests
import json
import sys
import time
from pyproj import Proj, transform

"""
Scrapes traffic data from bison-fute.gouv.fr
and exports it as a geojson stream, one event per line.
"""


def charge(x, y, z):
    nb = 0
    deeper = False
    url = dataurl + ("trafic/maintenant/tfs/evenements/%s/%s/%s.json" % (z, x, y))
    print(f'scraping URL: {url}', file=sys.stderr)
    datareq = requests.get(url=url)
    datajson = json.loads(datareq.text)
    if not datajson['empty']:
        for e in datajson['features']:
            if e['properties']['urlImage'][7] == '1':
                deeper = True
            lon, lat = transform(s_srs, t_srs, e['geometry']['coordinates'][0], e['geometry']['coordinates'][1])
            geometry = dict(type='Point', coordinates=[round(lon, 6), round(lat, 6)])
            quoi = e['properties']['urlImage'][11:-4]
            if quoi == 'travaux':
                quoi = 'roadwork'
            if quoi == 'bouchon':
                quoi = 'jam'
            e_what = 'traffic.' + quoi
            detailreq = requests.get('http://www.bison-fute.gouv.fr/' + e['properties']['urlcpc'])
            detail = json.loads(detailreq.text)
            if len(detail) > 1:
                deeper = True
            else:
                for d in detail:
                    print(json.dumps({"geometry": geometry, "properties": {"data": d}}))
    #
    # if deeper:
    #     charge(2*x, 2*y, z+1)
    #     charge((2*x)+1, 2*y, z+1)
    #     charge(2*x, (2*y)+1, z+1)
    #     charge((2*x)+1, (2*y)+1, z+1)
    #
    return nb


# projections used to convert Lambert 93 (EPSG:2154) to WGS84
s_srs = Proj(init='EPSG:2154')
t_srs = Proj(init='EPSG:4326')

# fetch the timestamp of the latest data iteration
datereq = requests.get(url='http://www.bison-fute.gouv.fr/data/iteration/date.json')
datejson = json.loads(datereq.text)
dernier = time.strftime('%Y%m%d-%H%M%S', time.localtime(datejson[0]/1000))
dataurl = "http://www4.bison-fute.gouv.fr/data/data-%s/" % dernier
print(f'dataurl: {dataurl}', file=sys.stderr)

# walk the 5x5 tile grid at zoom level 1
for x0 in range(0, 5):
    for y0 in range(0, 5):
        charge(x0, y0, 1)

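A typical run of the scraper above (the cron script at the end of this commit invokes it as bisonfute.py; the output file name is arbitrary) writes one GeoJSON feature per line to the file:

python3 bisonfute.py > events.json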
View file

@@ -0,0 +1,71 @@
date,aller,retour
02/01/16,,1O
03/01/16,,O
06/02/16,O4R,
13/02/16,O4N,4O6O
19/02/16,O,
20/02/16,O4N,3R4R
26/02/16,1O,
27/02/16,1O4O,O4R
05/03/16,4O,1O4O
25/03/16,O1R4R,
26/03/16,O,
28/03/16,,O1R2R
02/04/16,3O4O,
09/04/16,O,
15/04/16,1O2O,
16/04/16,O,O
22/04/16,1O,
23/04/16,1O4O,
24/04/16,,1O
30/04/16,1O,
01/05/16,,O
04/05/16,R,
05/05/16,O,
08/05/16,,R
13/05/16,O1R,
14/05/16,O,
16/05/16,,O
01/07/16,O1R,
02/07/16,O,
08/07/16,O1R,
09/07/16,R,
10/07/16,1O,
13/07/16,O1R,
14/07/16,O,
16/07/16,R,
17/07/16,,O
22/07/16,O1R,
23/07/16,R,O
29/07/16,R,O
30/07/16,N,O6R
31/07/16,O,
05/08/16,O,6O
06/08/16,N,O6N
07/08/16,6O,6O
12/08/16,6O,6O
13/08/16,R,R
14/08/16,6O,5O6O
19/08/16,,O
20/08/16,O6R,R
21/08/16,,O4R
22/08/16,,1O
26/08/16,,O
27/08/16,4O6O,R
28/08/16,,O
29/08/16,,1O
21/10/16,1O,
28/10/16,O1R,
29/10/16,1O,
01/11/16,,O
02/11/16,,1O
10/11/16,O,
11/11/16,1O,
13/11/16,,1O
16/12/16,O,
17/12/16,3O4O,
22/12/16,1O,
23/12/16,O1R,
24/12/16,O4R,
26/12/16,1O4O,1O
02/01/17,,O
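The aller/retour cells use a compact code that the import script below decodes: an optional leading letter is the default colour for all zones (O orange, R red, N black), and each digit 1-6 names a Bison Futé zone whose colour is the letter that follows it. A minimal decoding sketch of that convention (hypothetical helper, mirroring the importer's logic):

def decode(cell):
    # e.g. "O4N": orange everywhere by default, zone 4 black
    default = cell[0] if cell[:1] > 'A' else ''
    return {zone: (cell[cell.find(str(zone)) + 1] if cell.find(str(zone)) != -1 else default)
            for zone in range(1, 7)}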

View file

@@ -0,0 +1,47 @@
import requests
import json
import time
import csv
import psycopg2

pg = psycopg2.connect("dbname=oedb")
db = pg.cursor()

api = 'http://localhost:8080'

with open('bisonfute2016.csv') as csvfile:
    trafic = csv.DictReader(csvfile, delimiter=',', quotechar='"')
    for row in trafic:
        start = '20'+row['date'][6:8]+'/'+row['date'][3:5]+'/'+row['date'][0:2]+'T00:00:00CET'
        stop = '20'+row['date'][6:8]+'/'+row['date'][3:5]+'/'+row['date'][0:2]+'T23:59:59CET'
        for sens in ['aller', 'retour']:
            if row[sens] != '':
                # a leading letter is the default colour for every zone
                if row[sens][0] > 'A':
                    defaut = row[sens][0]
                else:
                    defaut = ''
                for zone in range(1, 7):
                    # the zone digit may sit at index 0, so test against -1 explicitly
                    if row[sens].find(str(zone)) != -1:
                        couleur = row[sens][row[sens].find(str(zone))+1]
                    else:
                        couleur = defaut
                    if couleur > 'A':
                        db.execute('SELECT ST_asgeojson(geom) FROM bison_fute_geo WHERE zone = %s', (str(zone),))
                        geo = db.fetchone()
                        if geo is not None:
                            what = 'traffic.forecast'
                            if couleur == 'O':
                                what = what + '.orange'
                            if couleur == 'R':
                                what = what + '.red'
                            if couleur == 'N':
                                what = what + '.black'
                            if sens == 'aller':
                                what = what + '.out'
                            if sens == 'retour':
                                what = what + '.return'
                            p = dict(type='forecast', what=what, start=start, stop=stop, source='http://www.bison-fute.gouv.fr')
                            geojson = json.dumps(dict(type='Feature', properties=p, geometry=json.loads(geo[0])))
                            r = requests.post(api+'/event', data=geojson)
                            print("%s POST: %s %s" % (r.status_code, r.text, json.dumps(p)))

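The same endpoint can be exercised by hand for debugging, assuming the OEDB API is running locally as in the script above (placeholder point geometry):

curl -s -X POST http://localhost:8080/event -d '{"type":"Feature","properties":{"type":"forecast","what":"traffic.forecast.orange.out","start":"2016/01/02T00:00:00CET","stop":"2016/01/02T23:59:59CET","source":"http://www.bison-fute.gouv.fr"},"geometry":{"type":"Point","coordinates":[2.35,48.85]}}'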
View file

@@ -0,0 +1,28 @@
# ogr2ogr is shipped in the gdal-bin package
sudo apt install unzip gdal-bin
wget "https://www.data.gouv.fr/api/1/datasets/r/0e117c06-248f-45e5-8945-0e79d9136165" -O communes-shp.zip
unzip -o communes-shp.zip
ogr2ogr -f postgresql PG:"dbname=oedb" communes.shp -nln communes -nlt geometry
psql oedb -c "
create index communes_insee on communes (insee);
alter table communes drop ogc_fid;
alter table communes drop surf_ha;
"
wget "https://www.data.gouv.fr/datasets/contours-des-departements-francais-issus-d-openstreetmap/#/resources/eb36371a-761d-44a8-93ec-3d728bec17ce" -O departements-shp.zip
unzip -o departements-shp.zip
ogr2ogr -f postgresql PG:"dbname=oedb" departements-20230101.shp -nln departements -nlt geometry
psql oedb -c "
create index departements_insee on departements (insee);
alter table departements drop ogc_fid;
alter table departements drop surf_ha;
"
rm departements-*
# create the bison futé table and import the data
sudo -u postgres psql oedb -c "create table bison_fute_zones (dep text, zone text);"
sudo -u postgres psql oedb -c "\copy bison_fute_zones from bisonfute2016.csv with (format csv, header true);"
sudo -u postgres psql oedb -c "create materialized view bison_fute_geo as select zone, st_snaptogrid(st_union(wkb_geometry),0.00001) as geom FROM bison_fute_zones b join departements d on (d.insee=b.dep) group by 1;"
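A quick sanity check of the resulting view (optional), listing each zone with its vertex count and geometry validity:

psql oedb -c "select zone, st_npoints(geom), st_isvalid(geom) from bison_fute_geo order by zone;"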

View file

@@ -0,0 +1,19 @@
HIERYY=$(date --date="-1 day" +'%Y')
HIERMM=$(date --date="-1 day" +'%m')
HIER=$(date --date="-1 day" +'%Y-%m-%d')
mkdir -p "$(date +%Y-%m)"
f="$(date +%Y-%m)/bison-fute-$(date +%Y-%m-%dT%H:%M:00%z).json"
l="last.json"
py bisonfute.py > "$f"
# ~/.virtualenvs/oedb/bin/python parks.py
# keep the new snapshot only if it differs from the previous one
if diff "$f" "$l" >/dev/null
then
    rm "$f"
    touch -h "$l"
else
    ln -fs "$f" "$l"
fi
# archive the previous day's data, then remove it
7z a $HIERYY.7z $HIERYY-$HIERMM/*$HIER* && rm $HIERYY-$HIERMM/*$HIER*
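A possible crontab entry, assuming the script above is saved as get-bisonfute.sh in the checkout directory that also holds bisonfute.py (both the script name and the path are placeholders), running e.g. every 10 minutes:

*/10 * * * * cd /path/to/oedb && bash get-bisonfute.sh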