Python-Dev / flask-admin / Commits / 8f66eedf

Commit 8f66eedf authored Dec 22, 2015 by Arthur Bressan

Adds S3 storage backend

parent e146f3ff

Showing 1 changed file with 157 additions and 3 deletions

flask_admin/contrib/fileadmin.py  (+157 -3)
...
@@ -6,10 +6,16 @@ import re
import shutil
from operator import itemgetter
from werkzeug import secure_filename

try:
    from boto import s3
    from boto.s3.prefix import Prefix
    from boto.s3.key import Key
except ImportError:
    s3 = None

import dateutil.parser

from flask import flash, redirect, abort, request, send_file
from werkzeug import secure_filename
from wtforms import fields, validators
from flask_admin import form, helpers
...
@@ -19,6 +25,154 @@ from flask_admin.actions import action, ActionsMixin
from flask_admin.babel import gettext, lazy_gettext


class S3Storage(object):
    """
        Storage object representing files on an Amazon S3 bucket.

        Usage::

            from flask_admin.contrib.fileadmin import BaseFileAdmin, S3Storage

            class MyS3Admin(BaseFileAdmin):
                # Configure your class however you like
                pass

            fileadmin_view = MyS3Admin(storage=S3Storage(...))
    """

    def __init__(self, bucket_name, region, aws_access_key_id,
                 aws_secret_access_key):
        """
            Constructor

                :param bucket_name:
                    Name of the bucket that the files are on.

                :param region:
                    Region that the bucket is located

                :param aws_access_key_id:
                    AWS Access Key ID

                :param aws_secret_access_key:
                    AWS Secret Access Key

            Make sure the credentials have the correct permissions set up on
            Amazon or else S3 will return a 403 FORBIDDEN error.
        """
        if not s3:
            raise ValueError('Could not import boto. You can install boto by '
                             'using pip install boto')

        connection = s3.connect_to_region(
            region,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)
        self.bucket = connection.get_bucket(bucket_name)

    def get_files(self, path, directory):
        def _strip_path(name, path):
            if name.startswith(path):
                return name.replace(path, '', 1)
            return name

        def _remove_trailing_slash(name):
            return name[:-1]

        files = []
        directories = []
        if path and not path.endswith('/'):
            path += '/'
        for key in self.bucket.list(path, '/'):
            if key.name == path:
                continue
            if isinstance(key, Prefix):
                name = _remove_trailing_slash(_strip_path(key.name, path))
                key_name = _remove_trailing_slash(key.name)
                directories.append((name, key_name, True, 0, 0))
            else:
                last_modified = int(dateutil.parser.parse(
                    key.last_modified).strftime('%s'))
                name = _strip_path(key.name, path)
                files.append((name, key.name, False, key.size, last_modified))
        return directories + files

    def _get_bucket_list_prefix(self, path):
        parts = path.split('/')
        if len(parts) == 1:
            search = ''
        else:
            search = '/'.join(parts[:-1]) + '/'
        return search

    def _get_path_keys(self, path):
        search = self._get_bucket_list_prefix(path)
        return {key.name for key in self.bucket.list(search, '/')}

    def is_dir(self, path):
        keys = self._get_path_keys(path)
        return path + '/' in keys

    def path_exists(self, path):
        if path == '':
            return True
        keys = self._get_path_keys(path)
        return path in keys or path + '/' in keys

    def get_base_path(self):
        return ''

    def get_breadcrumbs(self, path):
        accumulator = []
        breadcrumbs = []
        for n in path.split('/'):
            accumulator.append(n)
            breadcrumbs.append((n, '/'.join(accumulator)))
        return breadcrumbs

    def send_file(self, file_path):
        key = self.bucket.get_key(file_path)
        if key is None:
            raise ValueError()
        return redirect(key.generate_url(3600))

    def save_file(self, path, file_data):
        key = Key(self.bucket, path)
        key.set_contents_from_file(file_data.stream)

    def delete_tree(self, directory):
        self._check_empty_directory(directory)
        self.bucket.delete_key(directory + '/')

    def delete_file(self, file_path):
        self.bucket.delete_key(file_path)

    def make_dir(self, path, directory):
        dir_path = '/'.join([path, directory + '/'])
        key = Key(self.bucket, dir_path)
        key.set_contents_from_string('')

    def _check_empty_directory(self, path):
        if not self._is_directory_empty(path):
            raise ValueError(gettext('Cannot operate on non empty '
                                     'directories'))
        return True

    def rename_path(self, src, dst):
        if self.is_dir(src):
            self._check_empty_directory(src)
            src += '/'
            dst += '/'
        self.bucket.copy_key(dst, self.bucket.name, src)
        self.delete_file(src)

    def _is_directory_empty(self, path):
        keys = self._get_path_keys(path + '/')
        return len(keys) == 1


class LocalFileStorage(object):
    def __init__(self, base_path):
        """
...
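For context, a minimal sketch of how the new backend might be wired up, following the usage pattern from the S3Storage class docstring above. The bucket, region, and credential values are placeholders, and registering the view on a flask_admin Admin instance is assumed rather than taken from this commit:

    from flask import Flask
    from flask_admin import Admin
    from flask_admin.contrib.fileadmin import BaseFileAdmin, S3Storage

    class MyS3Admin(BaseFileAdmin):
        # Configure the view however you like, as in the docstring example
        pass

    app = Flask(__name__)
    admin = Admin(app)

    # Placeholder credentials: S3Storage takes the four constructor arguments
    # added in this commit, and the credentials need permissions on the
    # bucket or S3 answers with 403 FORBIDDEN.
    storage = S3Storage(bucket_name='example-bucket',
                        region='us-east-1',
                        aws_access_key_id='AKIA...',
                        aws_secret_access_key='...')

    admin.add_view(MyS3Admin(storage=storage))

Since the storage object talks to the bucket directly through boto, the view needs no local filesystem path; get_base_path simply returns an empty string.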