Prepopulate sample/recon with metadata from previous entry
WardLT committed Feb 7, 2018
1 parent 2fc79db commit 91d4c0a
Showing 4 changed files with 99 additions and 28 deletions.
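
The main change is in nucapt/views.py: when the sample- or reconstruction-creation page is loaded with a GET request, the form is now seeded with metadata copied from the most recent existing entry instead of being rendered blank. Below is a minimal sketch of that pattern, distilled from the create_sample diff further down; the helper name build_prepopulated_sample_form, its standalone packaging, and the import path are illustrative assumptions, while the class and method names (APTSampleForm, list_samples, load_sample_information, and so on) are the ones used in the diff.

from nucapt.forms import APTSampleForm  # assumed import path, matching the repository layout

def build_prepopulated_sample_form(dataset):
    """Seed a new sample form with metadata from the most recent sample, if any."""
    samples, _ = dataset.list_samples()

    # Always suggest the next sample name
    new_metadata = {'sample_name': 'Sample%d' % (len(samples) + 1)}

    if len(samples) > 0:
        # Copy metadata from the sample with the latest name
        last_sample = sorted(samples, key=lambda x: x.name)[-1]
        for field, record in zip(
                ['sample_form', 'collection_form', 'preparation_form'],
                [last_sample.load_sample_information(),
                 last_sample.load_collection_metadata(),
                 last_sample.load_preparation_metadata()]):
            new_metadata[field] = record.metadata

    # WTForms-style forms accept nested defaults as keyword arguments
    return APTSampleForm(**new_metadata)
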
2 changes: 1 addition & 1 deletion nucapt/forms.py
@@ -114,7 +114,7 @@ class APTSampleForm(Form):
title='Only word characters allowed: A-Z, a-z, 0-9, and _'),
validators=[Regexp('\\w+', message='File name can only contain word '
'characters: A-Z, a-z, 0-9, and _')])
sample_form = FormField(APTSampleDescriptionForm, description="Metadata for the ")
sample_form = FormField(APTSampleDescriptionForm, description="Metadata that describes the sample")
collection_form = FormField(APTCollectionMethodForm, description="Metadata for data collection method")
preparation_form = FormField(APTSamplePreparationForm, description="Metadata for sample preparation")
rhit_file = FileField('RHIT file', validators=[Optional()])
1 change: 1 addition & 0 deletions nucapt/manager.py
@@ -377,6 +377,7 @@ def list_reconstructions(self):
:return:
- list of APTSampleDirectory, reconstructions
- list of dict, metadata for each reconstruction
- list of str, errors"""

# Find all subdirectories that contain "SampleInformation.yaml"
93 changes: 68 additions & 25 deletions nucapt/views.py
@@ -131,7 +131,7 @@ def create():
form = DatasetForm(request.form)
if request.method == 'POST' and form.validate():
dataset = APTDataDirectory.initialize_dataset(form)
return redirect('/dataset/%s'%dataset.name)
return redirect('/dataset/%s' % dataset.name)
return render_template('dataset_create.html', title=title, description=description, form=form,
navbar=[('Create Dataset', '#')])

@@ -157,7 +157,8 @@ def edit_dataset(dataset_name):
dataset.update_metadata(form)
return redirect('/dataset/' + dataset_name)
else:
return render_template('dataset_create.html', title=title, description=description, form=form, navbar=navbar)
return render_template('dataset_create.html', title=title, description=description, form=form,
navbar=navbar)
else:
form = DatasetForm(**dataset.get_metadata().metadata)
return render_template('dataset_create.html', title=title, description=description, form=form, navbar=navbar)
@@ -173,7 +174,8 @@ def display_dataset(dataset_name):
except DatasetParseException as exc:
dataset = None
errors = exc.errors
return render_template('dataset.html', name=dataset_name, dataset=dataset, errors=errors, navbar=[(dataset_name, '/dataset/%s' % dataset_name)])
return render_template('dataset.html', name=dataset_name, dataset=dataset, errors=errors,
navbar=[(dataset_name, '/dataset/%s' % dataset_name)])
samples, sample_errors = dataset.list_samples()
errors.extend(sample_errors)
metadata = dataset.get_metadata()
@@ -209,9 +211,9 @@ def publish_dataset(dataset_name):

# Create the PublicationClient
globus_publish_client = DataPublicationClient(authorizer=
RefreshTokenAuthorizer(
session["tokens"]["publish.api.globus.org"]
["refresh_token"], load_portal_client()))
RefreshTokenAuthorizer(
session["tokens"]["publish.api.globus.org"]
["refresh_token"], load_portal_client()))

# Create the transfer client
mdf_transfer_client = TransferClient(authorizer=
@@ -232,7 +234,7 @@
# Transfer data
try:
# '/' of the Globus endpoint for the working data is the working data path
data_path = '/%s/'%(os.path.relpath(data.path, app.config['WORKING_PATH']))
data_path = '/%s/' % (os.path.relpath(data.path, app.config['WORKING_PATH']))
toolbox.quick_transfer(mdf_transfer_client, app.config["WORKING_DATA_ENDPOINT"],
pub_endpoint, [(data_path, pub_path)], timeout=0)
except Exception as e:
@@ -266,7 +268,7 @@ def list_datasets():
"""List all datasets currently stored at default data path"""

dir_info = APTDataDirectory.get_all_datasets(app.config['WORKING_PATH'])
dir_valid = dict([(dir, isinstance(info,APTDataDirectory)) for dir,info in dir_info.items()])
dir_valid = dict([(dir, isinstance(info, APTDataDirectory)) for dir, info in dir_info.items()])
return render_template("dataset_list.html", dir_info=dir_info, dir_valid=dir_valid,
navbar=[('List Datasets', '#')])

@@ -286,9 +288,26 @@ def create_sample(dataset_name):
return redirect('/dataset/' + dataset_name)

# Initialize form data
form = APTSampleForm(request.form) \
if request.method == 'POST' \
else APTSampleForm(sample_name='Sample%d'%(len(dataset.list_samples()[0])+1))
if request.method == 'POST':
form = APTSampleForm(request.form)
else:
samples, errors = dataset.list_samples()

# Make a new name
new_metadata = {'sample_name': 'Sample%d' % (len(samples) + 1)}

if len(samples) > 0:
# Copy data from another sample
last_sample = sorted(samples, key=lambda x: x.name)[-1]

# Loop over each subfield
for n, m in zip(['sample_form', 'collection_form', 'preparation_form'],
[last_sample.load_sample_information(), last_sample.load_collection_metadata(),
last_sample.load_preparation_metadata()]):
new_metadata[n] = m.metadata

# Initialize the form
form = APTSampleForm(**new_metadata)

if request.method == 'POST' and form.validate():
# attempt to validate the metadata
@@ -324,13 +343,14 @@ def create_sample(dataset_name):
def view_sample(dataset_name, sample_name):
"""View metadata about sample"""

navbar = [(dataset_name, '/dataset/%s'%dataset_name), (sample_name, '#')]
navbar = [(dataset_name, '/dataset/%s' % dataset_name), (sample_name, '#')]

# Load in the sample by name
try:
sample = APTSampleDirectory.load_dataset_by_name(dataset_name, sample_name)
except DatasetParseException as exc:
return render_template('sample.html', dataset_name=dataset_name, sample=sample, errors=exc.errors, navbar=navbar)
return render_template('sample.html', dataset_name=dataset_name, sample=sample, errors=exc.errors,
navbar=navbar)

# Load in the dataset
is_published = APTDataDirectory.load_dataset_by_name(dataset_name).is_published()
@@ -365,7 +385,7 @@ def edit_sample_information(dataset_name, sample_name):
try:
sample = APTSampleDirectory.load_dataset_by_name(dataset_name, sample_name)
except DatasetParseException as exc:
return redirect("/dataset/%s/sample/%s"%(dataset_name, sample_name))
return redirect("/dataset/%s/sample/%s" % (dataset_name, sample_name))

# Load in the metadata
edit_page = 'sample_generalform.html'
@@ -386,7 +406,7 @@ def edit_collection_information(dataset_name, sample_name):
try:
sample = APTSampleDirectory.load_dataset_by_name(dataset_name, sample_name)
except DatasetParseException as exc:
return redirect("/dataset/%s/sample/%s"%(dataset_name, sample_name))
return redirect("/dataset/%s/sample/%s" % (dataset_name, sample_name))

# Load in the metadata
edit_page = 'sample_collectionform.html'
@@ -407,7 +427,7 @@ def edit_sample_preparation(dataset_name, sample_name):
try:
sample = APTSampleDirectory.load_dataset_by_name(dataset_name, sample_name)
except DatasetParseException as exc:
return redirect("/dataset/%s/sample/%s"%(dataset_name, sample_name))
return redirect("/dataset/%s/sample/%s" % (dataset_name, sample_name))

# Load in the metadata
edit_page = 'sample_prepform.html'
@@ -478,12 +498,34 @@ def create_reconstruction(dataset_name, sample_name):
try:
sample = APTSampleDirectory.load_dataset_by_name(dataset_name, sample_name)
except DatasetParseException as exc:
return redirect("/dataset/%s/sample/%s"%(dataset_name, sample_name))
return redirect("/dataset/%s/sample/%s" % (dataset_name, sample_name))

# Create the form
form = AddAPTReconstructionForm(request.form) \
if request.method == 'POST' \
else AddAPTReconstructionForm(name='Reconstruction%d'%(len(sample.list_reconstructions()[0]) + 1))
if request.method == 'POST':
form = AddAPTReconstructionForm(request.form)
else:
# Load the existing reconstructions
recons, _, _ = sample.list_reconstructions()

# Populate the metadata
new_metadata = dict(name='Reconstruction%d'%(len(recons) + 1))

if len(recons) == 0:
# Try to find another sample
samples, _ = APTDataDirectory.load_dataset_by_name(dataset_name).list_samples()
for sample in sorted(samples, key=lambda x: x.name)[::-1]:
my_recons, _, _ = sample.list_reconstructions()
if len(my_recons) > 0:
recons = my_recons
break

# If you can find a reconstruction, prepopulate the form
if len(recons) > 0:
old_metadata = sorted(recons, key=lambda x: x.name)[-1].load_metadata()
new_metadata.update(old_metadata.metadata)

# Create the form
form = AddAPTReconstructionForm(**new_metadata)

# Make sure it validates
if request.method == 'POST' and form.validate():
@@ -521,7 +563,7 @@ def create_reconstruction(dataset_name, sample_name):
rrng_file.save(os.path.join(recon.path, secure_filename(rrng_file.filename)))
if 'tip_image' in request.files:
tip_image = request.files['tip_image']
tip_image.save(os.path.join(recon.path, 'tip_image.%s'%(tip_image.filename.split(".")[-1])))
tip_image.save(os.path.join(recon.path, 'tip_image.%s' % (tip_image.filename.split(".")[-1])))

return redirect("/dataset/%s/sample/%s/recon/%s" % (dataset_name, sample_name, recon_name))

@@ -591,15 +633,16 @@ def add_analysis_data(dataset_name, sample_name, recon_name):
analysis_name = APTAnalysisDirectory.create_analysis_directory(form, dataset_name, sample_name, recon_name)

# Upload the data
analysis_name = APTAnalysisDirectory.load_dataset_by_name(dataset_name, sample_name, recon_name, analysis_name)
analysis_name = APTAnalysisDirectory.load_dataset_by_name(dataset_name, sample_name, recon_name,
analysis_name)
files = request.files.getlist('files')
if len(files) > 0:
flash('Uploaded %d files:'%len(files) + " ".join([os.path.basename(x.filename) for x in files]),
flash('Uploaded %d files:' % len(files) + " ".join([os.path.basename(x.filename) for x in files]),
category='success')
for file in files:
file.save(os.path.join(analysis_name.path, secure_filename(file.filename)))

return redirect("/dataset/%s/sample/%s/recon/%s"%(dataset_name, sample_name, recon_name))
return redirect("/dataset/%s/sample/%s/recon/%s" % (dataset_name, sample_name, recon_name))

except DatasetParseException as err:
errors.append(err.errors)
@@ -645,7 +688,7 @@ def edit_analysis_metadata(dataset_name, sample_name, recon_name, analysis_name)
# Upload new files
files = request.files.getlist('files')
if len(files) > 0:
flash('Uploaded %d files:'%len(files) + " ".join([os.path.basename(x.filename) for x in files]),
flash('Uploaded %d files:' % len(files) + " ".join([os.path.basename(x.filename) for x in files]),
category='success')
for file in files:
file.save(os.path.join(analysis.path, secure_filename(file.filename)))
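The reconstruction form follows the same pattern, with one extra step visible in the create_reconstruction hunk above: if the current sample has no reconstructions yet, the view searches the dataset's other samples, newest name first, and copies metadata from the latest reconstruction it finds. Here is a standalone sketch of just that fallback search; the helper name find_previous_recon_metadata and the nucapt.manager import path are assumptions, while the classes and calls are those used in the diff.

from nucapt.manager import APTDataDirectory  # assumed import path

def find_previous_recon_metadata(dataset_name, sample):
    """Return metadata from the most recent reconstruction, preferring the current sample.

    Falls back to reconstructions under other samples in the dataset, and returns
    an empty dict if the dataset has no reconstructions at all."""
    recons, _, _ = sample.list_reconstructions()

    if len(recons) == 0:
        # Fall back to the other samples, newest name first
        samples, _ = APTDataDirectory.load_dataset_by_name(dataset_name).list_samples()
        for other_sample in sorted(samples, key=lambda x: x.name, reverse=True):
            other_recons, _, _ = other_sample.list_reconstructions()
            if len(other_recons) > 0:
                recons = other_recons
                break

    if len(recons) > 0:
        # Copy metadata from the reconstruction with the latest name
        return sorted(recons, key=lambda x: x.name)[-1].load_metadata().metadata
    return {}
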
31 changes: 29 additions & 2 deletions tests/test_website.py
@@ -240,6 +240,14 @@ def test_sample_method(self):
rv = self.app.get('/dataset/%s/sample/Sample3'%dataset_name)
self.assertEquals(200, rv.status_code)

# Make sure the form is prepopulated with data from a previous sample
rv = self.app.get('/dataset/%s/sample/create'%dataset_name)
self.assertEquals(200, rv.status_code)

soup = BeautifulSoup(rv.data, 'html.parser')
field = soup.find('input', {'id': 'preparation_form-electropolish-0-solution'})
self.assertEquals('water', field['value'])

def test_reconstructions(self):
"""Test dealing with reconstructions"""

@@ -284,6 +292,25 @@ def test_reconstructions(self):
self.assertEquals(200, rv.status_code)
self.assertIn(b'Recon1', rv.data)

# See if the form pre-populates
rv = self.app.get('/dataset/%s/sample/%s/recon/create'%(dataset_name, sample_name))
self.assertEquals(200, rv.status_code)

soup = BeautifulSoup(rv.data, 'html.parser')
field = soup.find('textarea', {'name': 'description'})
self.assertEquals('Example reconstruction', field.contents[0])

# Now, create a new sample and see if the reconstruction form still finds it
self.create_sample(dataset_name, 'Sample2')

rv = self.app.get('/dataset/%s/sample/%s/recon/create' % (dataset_name, 'Sample2'))
self.assertEquals(200, rv.status_code)

soup = BeautifulSoup(rv.data, 'html.parser')
field = soup.find('textarea', {'name': 'description'})
self.assertEquals('Example reconstruction', field.contents[0])


def test_add_analysis(self):
"""Test dealing with adding analysis data"""

@@ -336,8 +363,8 @@ def test_add_analysis(self):
analysis_data['files'] = [(BytesIO(b'<junk>'), 'new_data.dat')]

rv = self.app.post('/dataset/%s/sample/%s/recon/%s/analysis/%s/edit' % (dataset_name, sample_name,
recon_name, analysis_name),
data=analysis_data)
recon_name, analysis_name),
data=analysis_data)
self.assertEquals(302, rv.status_code)
self.assertTrue(os.path.isfile(os.path.join(analysis.path, 'new_data.dat')))
metadata = analysis.load_metadata()