Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Sign in
Toggle navigation
F
flask-admin
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
JIRA
JIRA
Merge Requests
0
Merge Requests
0
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Commits
Issue Boards
Open sidebar
Python-Dev
flask-admin
Commits
780f1837
Commit
780f1837
authored
Dec 23, 2015
by
Serge S. Koval
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #1154 from pricez/s3-storage
Adds S3 storage backend
parents
67e1da5a
1cf0e6c1
Changes
4
Show whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
225 additions
and
5 deletions
+225
-5
AUTHORS
AUTHORS
+1
-0
advanced.rst
doc/advanced.rst
+18
-0
__init__.py
flask_admin/contrib/fileadmin/__init__.py
+7
-5
s3.py
flask_admin/contrib/fileadmin/s3.py
+199
-0
No files found.
AUTHORS
View file @
780f1837
...
...
@@ -22,5 +22,6 @@ Patches and Suggestions
- Artem Serga <artem@serga.name>
- Koblaid
- Julian Gonggrijp (UUDigitalHumanitieslab)
- Arthur de Paula Bressan (ArthurPBressan)
.. and more. If I missed you, let me know.
doc/advanced.rst
View file @
780f1837
...
...
@@ -79,9 +79,27 @@ can use it by adding a FileAdmin view to your app::
path = op.join(op.dirname(__file__), 'static')
admin.add_view(FileAdmin(path, '/static/', name='Static Files'))
FileAdmin also has out-of-the-box support for managing files located on an Amazon Simple Storage Service
bucket. To add it to your app::
from flask_admin import Admin
from flask_admin.contrib.fileadmin.s3 import S3FileAdmin
admin = Admin()
admin.add_view(S3FileAdmin('files_bucket', 'us-east-1', 'key_id', 'secret_key'))
You can disable uploads, disable file deletion, restrict file uploads to certain types, etc.
Check :mod:`flask_admin.contrib.fileadmin` in the API documentation for more details.
Adding new file backends
************************
You can also implement your own storage backend by creating a class that implements the same
methods defined in the `LocalFileStorage` class. Check :mod:`flask_admin.contrib.fileadmin` in the
API documentation for details on the methods.
Adding A Redis Console
----------------------
...
...
flask_admin/contrib/fileadmin.py
→
flask_admin/contrib/fileadmin
/__init__
.py
View file @
780f1837
...
...
@@ -6,10 +6,8 @@ import re
import
shutil
from
operator
import
itemgetter
from
werkzeug
import
secure_filename
from
flask
import
flash
,
redirect
,
abort
,
request
,
send_file
from
werkzeug
import
secure_filename
from
wtforms
import
fields
,
validators
from
flask_admin
import
form
,
helpers
...
...
@@ -563,13 +561,17 @@ class BaseFileAdmin(BaseView, ActionsMixin):
If the path does not exist, this will also raise a 404 exception.
"""
base_path
=
self
.
get_base_path
()
if
path
is
None
:
directory
=
base_path
path
=
''
else
:
path
=
op
.
normpath
(
path
)
directory
=
op
.
normpath
(
self
.
_separator
.
join
([
base_path
,
path
]))
if
base_path
:
directory
=
self
.
_separator
.
join
([
base_path
,
path
])
else
:
directory
=
path
directory
=
op
.
normpath
(
directory
)
if
not
self
.
is_in_folder
(
base_path
,
directory
):
abort
(
404
)
...
...
flask_admin/contrib/fileadmin/s3.py
0 → 100644
View file @
780f1837
import calendar
import time

try:
    from boto import s3
    from boto.s3.prefix import Prefix
    from boto.s3.key import Key
except ImportError:
    s3 = None

from flask import redirect

from flask_admin.babel import gettext

from . import BaseFileAdmin
class S3Storage(object):
    """
        Storage object representing files on an Amazon S3 bucket.

        S3 has no real directories: a "directory" here is a key prefix
        ending in the separator, backed by an empty placeholder key.

        Usage::

            from flask_admin.contrib.fileadmin import BaseFileAdmin
            from flask_admin.contrib.fileadmin.s3 import S3Storage

            class MyS3Admin(BaseFileAdmin):
                # Configure your class however you like
                pass

            fileadmin_view = MyS3Admin(storage=S3Storage(...))

    """

    def __init__(self, bucket_name, region, aws_access_key_id,
                 aws_secret_access_key):
        """
            Constructor

            :param bucket_name:
                Name of the bucket that the files are on.

            :param region:
                Region that the bucket is located

            :param aws_access_key_id:
                AWS Access Key ID

            :param aws_secret_access_key:
                AWS Secret Access Key

            :raises ValueError:
                If boto is not installed.

            Make sure the credentials have the correct permissions set up on
            Amazon or else S3 will return a 403 FORBIDDEN error.
        """
        if not s3:
            raise ValueError('Could not import boto. You can install boto by '
                             'using pip install boto')

        connection = s3.connect_to_region(
            region,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
        )
        self.bucket = connection.get_bucket(bucket_name)
        self.separator = '/'

    def get_files(self, path, directory):
        """
            List the entries directly under *path*.

            Returns a list of tuples
            ``(name, full_key_name, is_dir, size, last_modified)``
            with directories first.  ``last_modified`` is a UTC epoch
            timestamp.  *directory* is unused here but is part of the
            storage-backend interface shared with other backends.
        """
        def _strip_path(name, path):
            # Drop the listing prefix so only the entry's own name remains.
            if name.startswith(path):
                return name[len(path):]
            return name

        def _remove_trailing_slash(name):
            return name[:-1]

        def _iso_to_epoch(timestamp):
            # S3 reports last_modified as ISO-8601 UTC, e.g.
            # "2015-12-23T10:02:08.000Z".  The struct_time is therefore UTC,
            # so convert with calendar.timegm; time.mktime would read it as
            # local time and skew the result by the server's UTC offset.
            dt = time.strptime(timestamp.split(".")[0], "%Y-%m-%dT%H:%M:%S")
            return int(calendar.timegm(dt))

        files = []
        directories = []
        if path and not path.endswith(self.separator):
            path += self.separator
        for key in self.bucket.list(path, self.separator):
            if key.name == path:
                # Skip the placeholder key that represents *path* itself.
                continue
            if isinstance(key, Prefix):
                # A common prefix returned by the delimited listing:
                # present it as a sub-directory.
                name = _remove_trailing_slash(_strip_path(key.name, path))
                key_name = _remove_trailing_slash(key.name)
                directories.append((name, key_name, True, 0, 0))
            else:
                last_modified = _iso_to_epoch(key.last_modified)
                name = _strip_path(key.name, path)
                files.append((name, key.name, False, key.size, last_modified))
        return directories + files

    def _get_bucket_list_prefix(self, path):
        """
            Return the listing prefix for *path*'s parent directory
            ('' for a top-level path, otherwise 'parent/').
        """
        parts = path.split(self.separator)
        if len(parts) == 1:
            search = ''
        else:
            search = self.separator.join(parts[:-1]) + self.separator
        return search

    def _get_path_keys(self, path):
        """
            Return the set of key names that are siblings of *path*
            (a delimited listing of its parent prefix).
        """
        search = self._get_bucket_list_prefix(path)
        return {key.name for key in self.bucket.list(search, self.separator)}

    def is_dir(self, path):
        """
            Return True if *path* is a directory, i.e. a key named
            ``path + separator`` exists.
        """
        keys = self._get_path_keys(path)
        return path + self.separator in keys

    def path_exists(self, path):
        """
            Return True if *path* exists as either a file or a directory.
            The bucket root ('') always exists.
        """
        if path == '':
            return True
        keys = self._get_path_keys(path)
        return path in keys or (path + self.separator) in keys

    def get_base_path(self):
        """
            Return the storage root; an S3 bucket has no base path.
        """
        return ''

    def get_breadcrumbs(self, path):
        """
            Return ``(name, partial_path)`` tuples for each component of
            *path*, used to render navigation breadcrumbs.
        """
        accumulator = []
        breadcrumbs = []
        for n in path.split(self.separator):
            accumulator.append(n)
            breadcrumbs.append((n, self.separator.join(accumulator)))
        return breadcrumbs

    def send_file(self, file_path):
        """
            Redirect the client to a pre-signed URL (valid for one hour)
            for the key *file_path*.

            :raises ValueError:
                If the key does not exist on the bucket.
        """
        key = self.bucket.get_key(file_path)
        if key is None:
            raise ValueError('Could not find file: %s' % file_path)
        return redirect(key.generate_url(3600))

    def save_file(self, path, file_data):
        """
            Store the uploaded *file_data* (a werkzeug FileStorage) under
            the key *path*, streaming its contents to S3.
        """
        key = Key(self.bucket, path)
        key.set_contents_from_file(file_data.stream)

    def delete_tree(self, directory):
        """
            Delete the directory placeholder key for *directory*.

            :raises ValueError:
                If the directory is not empty (recursive deletion is not
                supported by this backend).
        """
        self._check_empty_directory(directory)
        self.bucket.delete_key(directory + self.separator)

    def delete_file(self, file_path):
        """
            Delete the key *file_path* from the bucket.
        """
        self.bucket.delete_key(file_path)

    def make_dir(self, path, directory):
        """
            Create a directory named *directory* under *path* by writing an
            empty placeholder key ending in the separator.
        """
        dir_path = self.separator.join([path, (directory + self.separator)])
        key = Key(self.bucket, dir_path)
        key.set_contents_from_string('')

    def _check_empty_directory(self, path):
        """
            Raise ValueError unless the directory *path* is empty;
            return True otherwise.
        """
        if not self._is_directory_empty(path):
            raise ValueError(gettext('Cannot operate on non empty '
                                     'directories'))
        return True

    def rename_path(self, src, dst):
        """
            Rename *src* to *dst* with a server-side copy followed by a
            delete.  Directories must be empty: only the placeholder key is
            copied, so renaming a non-empty directory would orphan its
            contents.
        """
        if self.is_dir(src):
            self._check_empty_directory(src)
            src += self.separator
            dst += self.separator
        self.bucket.copy_key(dst, self.bucket.name, src)
        self.delete_file(src)

    def _is_directory_empty(self, path):
        """
            Return True if the only key under *path* is its own
            placeholder (the delimited listing yields exactly one name).
        """
        keys = self._get_path_keys(path + self.separator)
        return len(keys) == 1
class S3FileAdmin(BaseFileAdmin):
    """
        Simple Amazon Simple Storage Service file-management interface.

        :param bucket_name:
            Name of the bucket that the files are on.

        :param region:
            Region that the bucket is located

        :param aws_access_key_id:
            AWS Access Key ID

        :param aws_secret_access_key:
            AWS Secret Access Key

        Sample usage::

            from flask_admin import Admin
            from flask_admin.contrib.fileadmin.s3 import S3FileAdmin

            admin = Admin()

            admin.add_view(S3FileAdmin('files_bucket', 'us-east-1', 'key_id', 'secret_key'))
    """

    def __init__(self, bucket_name, region, aws_access_key_id,
                 aws_secret_access_key, *args, **kwargs):
        # Wire up an S3-backed storage object, then hand every remaining
        # argument through to BaseFileAdmin untouched.
        backend = S3Storage(bucket_name, region, aws_access_key_id,
                            aws_secret_access_key)
        kwargs['storage'] = backend
        super(S3FileAdmin, self).__init__(*args, **kwargs)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment