
Add support for fetching most of a datasheet

Angelo DiNardi, 3 years ago
commit ee95eea4a9 (pull/1/head)

Changed files:
  1. app.py   (187)
  2. load.sql (1)

app.py (187)

@@ -116,6 +116,7 @@ class GuteNestLocationData(TypedDict, total=False):
    includedOnMap: bool

class DataSheetData(TypedDict, total=False):
    id: str
    submitterName: str
    colonyId: int
    date: str
@@ -135,7 +136,7 @@ class DataSheetData(TypedDict, total=False):
@app.route('/data/sheet/save', methods=['POST'])
def save_sheet():
-    data = request.get_json()  # type: DataSheetData
+    data: DataSheetData = request.get_json()

    entered_by_name = str(data['submitterName'])
    colony_id = int(data['colonyId'])
@@ -344,19 +345,203 @@ def save_sheet():
                    INSERT INTO gute_nest_location_data (
                        data_sheet_id,
                        index,
                        subcolony,
                        location,
                        included_on_map
                    ) VALUES (
                        %(data_sheet_id)s,
                        %(index)s,
                        %(subcolony)s,
                        %(location)s,
                        %(included_on_map)s
                    )
                """, {
                    'data_sheet_id': data_sheet_id,
                    'index': idx,
                    'subcolony': subcolony,
                    'location': location,
                    'included_on_map': included_on_map,
                })

    return jsonify(data)
@app.route('/data/sheet/load', methods=['POST'])
def load_sheet():
    data_sheet_id = request.form.get("data_sheet_id")

    with get_db() as db:
        with db.cursor() as cur:
            cur.execute("""
                SELECT
                    id,
                    colony_id,
                    date,
                    start_time,
                    end_time,
                    nest_visibility,
                    visibility_comments,
                    significant_changes,
                    significant_change_notes,
                    human_disturbance,
                    human_disturbance_notes,
                    additional_observations,
                    entered_by_name
                FROM data_sheet
                WHERE id = %(data_sheet_id)s
            """, {
                "data_sheet_id": data_sheet_id,
            })

            if cur.rowcount == 0:
                raise ValueError("data_sheet_id doesn't exist")

            row = cur.fetchone()
            data_sheet: DataSheetData = {
                "id": row.id,
                "colonyId": row.colony_id,
                "date": row.date,  # TODO: convert to just a date (w/o time)
                # "startTime": row.start_time,
                # "endTime": row.end_time,
                "nestVisibility": row.nest_visibility,
                "visibilityComments": row.visibility_comments,
                "colonySignificantChanges": row.significant_changes,
                "colonySignificantChangesNotes": row.significant_change_notes,
                "colonyHumanDisturbance": row.human_disturbance,
                "colonyHumanDisturbanceNotes": row.human_disturbance_notes,
                "colonyAdditionalObservations": row.additional_observations,
            }
            # SURVEY SUMMARY DATA
            cur.execute("""
                SELECT
                    id,
                    species_code,
                    total_nests,
                    total_adults,
                    total_young,
                    total_possible_nests
                FROM survey_summary
                WHERE data_sheet_id = %(data_sheet_id)s
            """, {
                "data_sheet_id": data_sheet_id,
            })

            survey_summary: Dict[str, SurveySummary] = {}
            for row in cur:
                summary: SurveySummary = {
                    "totalNests": row.total_nests,
                    "totalAdults": row.total_adults,
                    "totalYoung": row.total_young,
                    "totalPossibleNests": row.total_possible_nests,
                }
                survey_summary[row.species_code] = summary

            data_sheet['surveySummary'] = survey_summary
            # HEP NEST DATA
            cur.execute("""
                SELECT
                    nest_number,
                    focal,
                    species_code,
                    active,
                    stage,
                    adult_count,
                    chick_count,
                    chick_confidence,
                    comments
                FROM hep_nest_data
                WHERE data_sheet_id = %(data_sheet_id)s
                ORDER BY nest_number
            """, {
                "data_sheet_id": data_sheet_id,
            })

            hep_nest_data: List[HepNestData] = []
            for row in cur:
                hep_data: HepNestData = {
                    "number": row.nest_number,
                    "isFocal": row.focal,
                    "speciesCode": row.species_code,
                    "active": row.active,
                    "stage": row.stage,
                    "adultCount": row.adult_count,
                    "chickCount": row.chick_count,
                    "chickConfidence": row.chick_confidence,
                    "comments": row.comments,
                }
                hep_nest_data.append(hep_data)

            data_sheet['hepNestData'] = hep_nest_data
            # GUTE NEST DATA
            cur.execute("""
                SELECT
                    index,
                    subcolony,
                    species_code,
                    total_adults,
                    stage_0_adults,
                    stage_1_nests,
                    stage_2_chicks,
                    stage_3_chicks,
                    stage_4_chicks,
                    comments
                FROM gute_nest_data
                WHERE data_sheet_id = %(data_sheet_id)s
                ORDER BY index
            """, {
                "data_sheet_id": data_sheet_id,
            })

            gute_nest_data: List[GuteNestData] = []
            for row in cur:
                gute: GuteNestData = {
                    "subcolony": row.subcolony,
                    "speciesCode": row.species_code,
                    "totalAdults": row.total_adults,
                    "stage0Adults": row.stage_0_adults,
                    "stage1Nests": row.stage_1_nests,
                    "stage2Chicks": row.stage_2_chicks,
                    "stage3Chicks": row.stage_3_chicks,
                    "stage4Chicks": row.stage_4_chicks,
                    "comments": row.comments,
                }
                gute_nest_data.append(gute)

            data_sheet['guteNestData'] = gute_nest_data
            # Gute Nest Location Data
            gute_nest_location_data: List[GuteNestLocationData] = []
            cur.execute("""
                SELECT
                    index,
                    subcolony,
                    location,
                    included_on_map
                FROM gute_nest_location_data
                WHERE data_sheet_id = %(data_sheet_id)s
                ORDER BY index
            """, {
                "data_sheet_id": data_sheet_id,
            })

            for row in cur:
                gute_nest_location: GuteNestLocationData = {
                    "subcolony": row.subcolony,
                    "location": row.location,
                    "includedOnMap": row.included_on_map,
                }
                gute_nest_location_data.append(gute_nest_location)

            data_sheet['guteNestLocationData'] = gute_nest_location_data

    return data_sheet
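
For reference, a minimal sketch of how a client could call the new endpoint. The route, HTTP method, and data_sheet_id form field come from the diff above; the base URL, the placeholder UUID, and the use of the requests library are assumptions for illustration only.

import requests

# Hypothetical client call; the dev-server URL and the UUID are placeholders.
resp = requests.post(
    "http://localhost:5000/data/sheet/load",
    data={"data_sheet_id": "00000000-0000-0000-0000-000000000000"},  # form-encoded, per request.form.get
)
sheet = resp.json()
print(sheet["colonyId"], len(sheet["hepNestData"]))

Because load_sheet returns a dict from a Flask view, the response body arrives as JSON keyed by the camelCase names built in the handler.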

load.sql (1)

@@ -364,6 +364,7 @@ create table gute_nest_location_data (
    id uuid NOT NULL DEFAULT gen_random_uuid() PRIMARY KEY,
    data_sheet_id uuid NOT NULL,
    index int NOT NULL,
    subcolony text NOT NULL,
    location text NOT NULL,
    included_on_map boolean NOT NULL
);
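
One note on the row access style in load_sheet: columns are read as attributes (row.colony_id, row.total_nests), which assumes the cursor yields namedtuple-like rows, and the %(name)s placeholders match psycopg2's pyformat paramstyle. A minimal sketch of a connection helper that would satisfy that assumption; the DSN and the helper body are illustrative, and the get_db already defined in app.py may differ.

import psycopg2
import psycopg2.extras

def get_db():
    # Sketch only: NamedTupleCursor makes cur.fetchone() and iteration
    # return rows that support attribute access like row.colony_id.
    return psycopg2.connect(
        "dbname=colony_data",  # placeholder DSN
        cursor_factory=psycopg2.extras.NamedTupleCursor,
    )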
