-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathprepare.py
More file actions
71 lines (60 loc) · 2.7 KB
/
prepare.py
File metadata and controls
71 lines (60 loc) · 2.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import os
import pandas as pd
from metrics.volunteers.data import load_raw_data
from metrics.volunteers.setup import VIEW_DIR
from metrics.volunteers.summarise import (summarise_by_local_authority,
summarise_by_ward, summarise_by_week)
from metrics.volunteers.transform_shifts import SHIFT_DATA
def summarise():
    """Build the volunteer summary views and write them into VIEW_DIR.

    Loads the raw volunteer data, keeps only current volunteers whose
    checkpoint is not 'Volunteer Hosts', then writes the by-ward, by-week,
    and by-local-authority views plus a headline count of confirmed
    volunteers.
    """
    data = load_raw_data()
    # .eq(True)/.ne(...) mirror the original `== True` / `~(== ...)` masks
    # (including their NaN handling) without the comparison-to-True idiom.
    # Removed a leftover debug print of the full frame.
    data = data[data.current.eq(True) & data.checkpoint.ne('Volunteer Hosts')]
    summarise_by_ward(data, os.path.join(VIEW_DIR, 'by_ward.csv'))
    summarise_by_week(data, os.path.join(VIEW_DIR, 'by_week.csv'))
    summarise_by_local_authority(
        data,
        os.path.join(VIEW_DIR, 'by_local_authority.csv'),
        os.path.join(VIEW_DIR, 'la_stats.json'),
        os.path.join(VIEW_DIR, 'west_yorkshire.csv'))
    # Headline figure: number of volunteers with status 'confirmed'
    # (count of the `hash` column per status, sliced to 'confirmed').
    data.reset_index().groupby('status').hash.count().loc[
        ['confirmed']].to_json(os.path.join(VIEW_DIR, 'headlines.json'))
def prepare_shift_data():
    """Aggregate volunteer shift records into weekly and summary views.

    Writes VIEW_DIR/shifts/by_week.csv (weekly totals, Friday-ending weeks,
    with cumulative columns) and VIEW_DIR/shifts/summary.json (totals per
    event type, plus 'total' and 'total_events' rows).
    """
    shifts_dir = os.path.join(VIEW_DIR, 'shifts')
    os.makedirs(shifts_dir, exist_ok=True)

    shifts = pd.read_csv(SHIFT_DATA, parse_dates=['date'])
    shifts = shifts.rename(columns={'attended': 'volunteer_shifts'})
    # Normalise event types: default missing entries to the core programme,
    # then snake_case every label.
    shifts.event_type = shifts.event_type.fillna(
        'volunteer_event_programme'
    ).map(lambda label: label.lower().replace(' ', '_'))

    value_columns = ['date', 'volunteer_shifts', 'volunteer_hours']
    all_rows = shifts[value_columns]
    # Rows outside the core programme are duplicated under event-specific
    # column names so a single groupby produces both sets of totals.
    event_rows = shifts.loc[
        ~shifts.event_type.isin(['volunteer_event_programme']), value_columns
    ].rename(columns={
        'volunteer_shifts': 'volunteer_event_shifts',
        'volunteer_hours': 'volunteer_event_hours',
    })
    grouped = pd.concat([all_rows, event_rows]).groupby('date')

    by_week = grouped.sum().resample('W-FRI').sum().round().astype(int)
    for column in ('volunteer_shifts', 'volunteer_hours',
                   'volunteer_event_shifts', 'volunteer_event_hours'):
        by_week['cumulative_' + column] = by_week[column].cumsum()
    by_week.to_csv(os.path.join(shifts_dir, 'by_week.csv'))

    summary = shifts[['event_type', 'volunteer_shifts', 'volunteer_hours']]
    summary = summary.groupby('event_type').sum().round().astype(int)
    summary.loc['total'] = summary.sum()
    summary.loc['total_events'] = summary[
        summary.index.isin(['leeds_2023_events', 'partner_events'])].sum()
    summary.transpose().to_json(
        os.path.join(shifts_dir, 'summary.json'), orient="index", indent=2)
if __name__ == "__main__":
    # Script entry point: build the volunteer summaries, then the shift views.
    summarise()
    prepare_shift_data()