import aiohttp

async def get_bytes(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.read()
An image upload
@app.route('/analyze', methods=['POST'])
async def analyze(request):
    # Read the uploaded image out of the multipart form
    img_data = await request.form()
    img_bytes = await (img_data['file'].read())
    pred = learn.predict(img_bytes)[0]
    return JSONResponse({
        'results': str(pred)
    })
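To exercise this endpoint from the client side, a minimal sketch using the requests library (not part of this app) can post an image under the same 'file' form field the route reads; the filename, host, and port below are placeholder assumptions:

import requests

# Hypothetical local address and test image; adjust to your setup
resp = requests.post('http://localhost:5000/analyze',
                     files={'file': open('test.jpg', 'rb')})
print(resp.json())  # e.g. {'results': '...'}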
A URL
@app.route('/analyze', methods=['POST'])
async def analyze(request):
    img_bytes = await get_bytes(request.query_params["url"])
    pred = learn.predict(img_bytes)[0]
    return JSONResponse({
        'results': str(pred)
    })
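Since this version takes the image location from request.query_params, the client only passes a url parameter; a hedged sketch with requests (the image URL, host, and port are placeholders):

import requests

resp = requests.post('http://localhost:5000/analyze',
                     params={'url': 'https://example.com/dog.jpg'})
print(resp.json())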
A zip file (see below for how to upload a zip or other file)
import zipfile
import csv
import shutil

@app.route('/analyze', methods=['POST'])
async def analyze(request):
    data = await request.form()
    content = data['content']
    # Extract the uploaded archive into a working directory
    zip_ref = zipfile.ZipFile(content.file, 'r')
    path = Path('Downloaded_Images')
    path.mkdir(exist_ok=True)
    zip_ref.extractall(path)
    zip_ref.close()
    imgs = get_image_files(path)
    learn = load_learner(path/export_file_name)
    dl = learn.dls.test_dl(imgs)
    _, __, preds = learn.get_preds(dl=dl, with_decoded=True)
    # Clean up the extracted images and write the predictions out as a CSV
    shutil.rmtree('Downloaded_Images')
    with open('results.csv', 'w', newline='') as results_file:
        wr = csv.writer(results_file)
        wr.writerows([preds])
    return FileResponse('results.csv')
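A rough client-side counterpart, again using requests (the archive name, host, and port are assumptions), would upload the zip under the 'content' field and save the CSV the endpoint streams back:

import requests

with open('images.zip', 'rb') as f:
    resp = requests.post('http://localhost:5000/analyze', files={'content': f})

# The endpoint returns the CSV file itself, so write the raw bytes to disk
with open('results.csv', 'wb') as out:
    out.write(resp.content)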
Parsing a CSV of image URLs
import csv
import shutil
from io import StringIO

@app.route('/analyze', methods=['POST'])
async def analyze(request):
    data = await request.form()
    content = await (data['file'].read())
    s = str(content, 'utf-8')
    # The uploaded file is expected to contain one image URL per line
    data = StringIO(s)
    path = Path('Downloaded_Images')
    path.mkdir(exist_ok=True)
    download_images(path, urls=data)
    learn = load_learner(path/export_file_name)
    imgs = get_image_files(path)
    dl = learn.dls.test_dl(imgs)
    _, __, preds = learn.get_preds(dl=dl, with_decoded=True)
    shutil.rmtree('Downloaded_Images')
    with open('results.csv', 'w', newline='') as results_file:
        wr = csv.writer(results_file)
        wr.writerows([preds])
    return FileResponse('results.csv')
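If it helps to see the expected input, here is a hedged sketch that builds such a URL list in memory and posts it under the 'file' field (the URLs, host, and port are placeholders):

import io
import requests

urls = '\n'.join([
    'https://example.com/cat1.jpg',
    'https://example.com/cat2.jpg',
])
resp = requests.post('http://localhost:5000/analyze',
                     files={'file': ('urls.csv', io.BytesIO(urls.encode('utf-8')))})
open('results.csv', 'wb').write(resp.content)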
Tabular is different. Most of the work will be done by sending large chunks of data for analysis. Let's recreate what we did, but load it into Pandas.
import csv
from io import StringIO
import pandas as pd

@app.route('/analyze', methods=['POST'])
async def analyze(request):
    data = await request.form()
    content = await (data['file'].read())
    s = str(content, 'utf-8')
    data = StringIO(s)
    df = pd.read_csv(data)
    learn = load_learner(path/export_file_name)
    # if we want to do GPU:
    # learn.model = learn.model.cuda()
    dl = learn.dls.test_dl(df)
    _, __, y = learn.get_preds(dl=dl, with_decoded=True)
    df['Predictions'] = y
    # if we want to store the results
    path_res = Path('app/static/')
    df.to_csv(path_res/'results.csv')
    return FileResponse(path_res/'results.csv', media_type='text/csv')
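On the client side the returned CSV can go straight back into a DataFrame; a sketch with requests and pandas (the input file name, host, and port are assumptions):

import io
import requests
import pandas as pd

resp = requests.post('http://localhost:5000/analyze',
                     files={'file': open('rows_to_score.csv', 'rb')})
results = pd.read_csv(io.BytesIO(resp.content))
print(results['Predictions'].head())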
We need to adjust the JavaScript to accept a form:
client.js:
function analyze(){
  var uploadFiles = el('file-input').files;
  if (uploadFiles.length < 1) alert('Please select 1 file to analyze!');
  el('analyze-button').innerHTML = 'Analyzing...';
  var xhr = new XMLHttpRequest();
  var loc = window.location;
  xhr.open('POST', `${loc.protocol}//${loc.hostname}:${loc.port}/analyze`, true);
  xhr.onerror = function() {alert(xhr.responseText);};
  xhr.onload = function(e) {
    if (this.readyState === 4) {
      el("result-label").innerHTML = `Result = Good`;
      download('results.csv', 'results.csv');
    }
    el("analyze-button").innerHTML = "Analyze";
  };
  // Send the selected file as multipart form data
  var fileData = new FormData();
  fileData.append("file", uploadFiles[0]);
  xhr.send(fileData);
}
@app.route('/analyze', methods=['POST'])
async def analyze(request):
    data = await request.form()
    content = data['content']
    pred = learn.predict(content)[0]
    return JSONResponse({'result': str(pred)})
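Because this last route reads a plain form value rather than a file, a client can post the input directly under 'content'; a minimal hedged sketch (the sample input, host, and port are placeholders):

import requests

resp = requests.post('http://localhost:5000/analyze',
                     data={'content': 'some input to run through the model'})
print(resp.json())  # e.g. {'result': '...'}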